Sep 30 00:09:07 crc systemd[1]: Starting Kubernetes Kubelet... Sep 30 00:09:07 crc restorecon[4735]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 30 00:09:07 crc restorecon[4735]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Sep 30 00:09:07 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc 
restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 00:09:08 crc 
restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc 
restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc 
restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 
crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 
00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 
00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 00:09:08 crc 
restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc 
restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 00:09:08 crc restorecon[4735]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 30 00:09:09 crc kubenswrapper[4809]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 00:09:09 crc kubenswrapper[4809]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 30 00:09:09 crc kubenswrapper[4809]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 00:09:09 crc kubenswrapper[4809]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 30 00:09:09 crc kubenswrapper[4809]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Sep 30 00:09:09 crc kubenswrapper[4809]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.412159 4809 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424796 4809 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424851 4809 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424866 4809 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424878 4809 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424891 4809 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424908 4809 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424925 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424939 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424951 4809 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424966 4809 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424981 4809 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.424996 4809 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425011 4809 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425023 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425035 4809 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425047 4809 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425058 4809 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425070 4809 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425081 4809 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425092 4809 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425103 4809 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425114 4809 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425125 4809 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425139 4809 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425150 4809 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425162 4809 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425172 4809 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425183 4809 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425194 4809 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425205 4809 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425216 4809 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425235 4809 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425246 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425258 4809 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425269 4809 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425283 4809 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 
00:09:09.425294 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425305 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425318 4809 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425329 4809 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425340 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425350 4809 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425361 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425371 4809 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425382 4809 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425393 4809 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425404 4809 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425414 4809 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425425 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425437 4809 feature_gate.go:330] unrecognized feature gate: Example Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425449 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425460 4809 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425470 4809 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425481 4809 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425492 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425503 4809 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425513 4809 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425525 4809 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425536 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425547 4809 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425558 4809 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425569 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425580 4809 
feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425590 4809 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425601 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425613 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425624 4809 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425682 4809 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425697 4809 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425708 4809 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.425722 4809 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427109 4809 flags.go:64] FLAG: --address="0.0.0.0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427143 4809 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427168 4809 flags.go:64] FLAG: --anonymous-auth="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427184 4809 flags.go:64] FLAG: --application-metrics-count-limit="100" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427202 4809 flags.go:64] FLAG: --authentication-token-webhook="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427215 4809 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427232 4809 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427248 4809 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427260 4809 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427274 4809 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427289 4809 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427303 4809 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427316 4809 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427329 4809 flags.go:64] FLAG: --cgroup-root="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427342 4809 flags.go:64] FLAG: --cgroups-per-qos="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427355 4809 flags.go:64] FLAG: --client-ca-file="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427368 4809 flags.go:64] FLAG: --cloud-config="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427380 4809 flags.go:64] FLAG: --cloud-provider="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427392 4809 flags.go:64] FLAG: --cluster-dns="[]" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427407 4809 flags.go:64] FLAG: --cluster-domain="" 
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427420 4809 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427434 4809 flags.go:64] FLAG: --config-dir="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427447 4809 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427462 4809 flags.go:64] FLAG: --container-log-max-files="5" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427478 4809 flags.go:64] FLAG: --container-log-max-size="10Mi" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427491 4809 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427504 4809 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427517 4809 flags.go:64] FLAG: --containerd-namespace="k8s.io" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427532 4809 flags.go:64] FLAG: --contention-profiling="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427545 4809 flags.go:64] FLAG: --cpu-cfs-quota="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427558 4809 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427572 4809 flags.go:64] FLAG: --cpu-manager-policy="none" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427585 4809 flags.go:64] FLAG: --cpu-manager-policy-options="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427601 4809 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427614 4809 flags.go:64] FLAG: --enable-controller-attach-detach="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427627 4809 flags.go:64] FLAG: --enable-debugging-handlers="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427673 4809 flags.go:64] FLAG: --enable-load-reader="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427687 4809 flags.go:64] FLAG: --enable-server="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427700 4809 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427717 4809 flags.go:64] FLAG: --event-burst="100" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427731 4809 flags.go:64] FLAG: --event-qps="50" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427744 4809 flags.go:64] FLAG: --event-storage-age-limit="default=0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427757 4809 flags.go:64] FLAG: --event-storage-event-limit="default=0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427770 4809 flags.go:64] FLAG: --eviction-hard="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427786 4809 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427798 4809 flags.go:64] FLAG: --eviction-minimum-reclaim="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427813 4809 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427827 4809 flags.go:64] FLAG: --eviction-soft="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427840 4809 flags.go:64] FLAG: --eviction-soft-grace-period="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427853 4809 flags.go:64] 
FLAG: --exit-on-lock-contention="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427865 4809 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427878 4809 flags.go:64] FLAG: --experimental-mounter-path="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427890 4809 flags.go:64] FLAG: --fail-cgroupv1="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427902 4809 flags.go:64] FLAG: --fail-swap-on="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427915 4809 flags.go:64] FLAG: --feature-gates="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427930 4809 flags.go:64] FLAG: --file-check-frequency="20s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427944 4809 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427956 4809 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427969 4809 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427982 4809 flags.go:64] FLAG: --healthz-port="10248" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.427995 4809 flags.go:64] FLAG: --help="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428009 4809 flags.go:64] FLAG: --hostname-override="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428021 4809 flags.go:64] FLAG: --housekeeping-interval="10s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428033 4809 flags.go:64] FLAG: --http-check-frequency="20s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428046 4809 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428058 4809 flags.go:64] FLAG: --image-credential-provider-config="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428071 4809 flags.go:64] FLAG: --image-gc-high-threshold="85" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428084 4809 flags.go:64] FLAG: --image-gc-low-threshold="80" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428096 4809 flags.go:64] FLAG: --image-service-endpoint="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428108 4809 flags.go:64] FLAG: --kernel-memcg-notification="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428121 4809 flags.go:64] FLAG: --kube-api-burst="100" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428134 4809 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428148 4809 flags.go:64] FLAG: --kube-api-qps="50" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428160 4809 flags.go:64] FLAG: --kube-reserved="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428173 4809 flags.go:64] FLAG: --kube-reserved-cgroup="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428185 4809 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428198 4809 flags.go:64] FLAG: --kubelet-cgroups="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428210 4809 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428223 4809 flags.go:64] FLAG: --lock-file="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428235 4809 flags.go:64] FLAG: 
--log-cadvisor-usage="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428248 4809 flags.go:64] FLAG: --log-flush-frequency="5s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428262 4809 flags.go:64] FLAG: --log-json-info-buffer-size="0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428284 4809 flags.go:64] FLAG: --log-json-split-stream="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428297 4809 flags.go:64] FLAG: --log-text-info-buffer-size="0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428310 4809 flags.go:64] FLAG: --log-text-split-stream="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428322 4809 flags.go:64] FLAG: --logging-format="text" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428335 4809 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428349 4809 flags.go:64] FLAG: --make-iptables-util-chains="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428362 4809 flags.go:64] FLAG: --manifest-url="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428374 4809 flags.go:64] FLAG: --manifest-url-header="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428390 4809 flags.go:64] FLAG: --max-housekeeping-interval="15s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428402 4809 flags.go:64] FLAG: --max-open-files="1000000" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428430 4809 flags.go:64] FLAG: --max-pods="110" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428443 4809 flags.go:64] FLAG: --maximum-dead-containers="-1" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428455 4809 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428468 4809 flags.go:64] FLAG: --memory-manager-policy="None" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428481 4809 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428494 4809 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428508 4809 flags.go:64] FLAG: --node-ip="192.168.126.11" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428521 4809 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428549 4809 flags.go:64] FLAG: --node-status-max-images="50" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428562 4809 flags.go:64] FLAG: --node-status-update-frequency="10s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428575 4809 flags.go:64] FLAG: --oom-score-adj="-999" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428590 4809 flags.go:64] FLAG: --pod-cidr="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428602 4809 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428620 4809 flags.go:64] FLAG: --pod-manifest-path="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428633 4809 flags.go:64] FLAG: --pod-max-pids="-1" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428677 4809 flags.go:64] FLAG: --pods-per-core="0" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 
00:09:09.428691 4809 flags.go:64] FLAG: --port="10250" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428703 4809 flags.go:64] FLAG: --protect-kernel-defaults="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428716 4809 flags.go:64] FLAG: --provider-id="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428728 4809 flags.go:64] FLAG: --qos-reserved="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428740 4809 flags.go:64] FLAG: --read-only-port="10255" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428753 4809 flags.go:64] FLAG: --register-node="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428765 4809 flags.go:64] FLAG: --register-schedulable="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428778 4809 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428798 4809 flags.go:64] FLAG: --registry-burst="10" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428811 4809 flags.go:64] FLAG: --registry-qps="5" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428826 4809 flags.go:64] FLAG: --reserved-cpus="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428839 4809 flags.go:64] FLAG: --reserved-memory="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428863 4809 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428876 4809 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428889 4809 flags.go:64] FLAG: --rotate-certificates="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428902 4809 flags.go:64] FLAG: --rotate-server-certificates="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428915 4809 flags.go:64] FLAG: --runonce="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428929 4809 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428942 4809 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428955 4809 flags.go:64] FLAG: --seccomp-default="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428967 4809 flags.go:64] FLAG: --serialize-image-pulls="true" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428979 4809 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.428993 4809 flags.go:64] FLAG: --storage-driver-db="cadvisor" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429005 4809 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429018 4809 flags.go:64] FLAG: --storage-driver-password="root" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429031 4809 flags.go:64] FLAG: --storage-driver-secure="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429043 4809 flags.go:64] FLAG: --storage-driver-table="stats" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429057 4809 flags.go:64] FLAG: --storage-driver-user="root" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429069 4809 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429083 4809 flags.go:64] FLAG: --sync-frequency="1m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429095 4809 flags.go:64] FLAG: 
--system-cgroups="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429108 4809 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429128 4809 flags.go:64] FLAG: --system-reserved-cgroup="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429140 4809 flags.go:64] FLAG: --tls-cert-file="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429152 4809 flags.go:64] FLAG: --tls-cipher-suites="[]" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429167 4809 flags.go:64] FLAG: --tls-min-version="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429179 4809 flags.go:64] FLAG: --tls-private-key-file="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429191 4809 flags.go:64] FLAG: --topology-manager-policy="none" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429204 4809 flags.go:64] FLAG: --topology-manager-policy-options="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429216 4809 flags.go:64] FLAG: --topology-manager-scope="container" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429229 4809 flags.go:64] FLAG: --v="2" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429245 4809 flags.go:64] FLAG: --version="false" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429261 4809 flags.go:64] FLAG: --vmodule="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429276 4809 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.429289 4809 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429560 4809 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429579 4809 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429594 4809 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429607 4809 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429621 4809 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429674 4809 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429689 4809 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429702 4809 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429716 4809 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429732 4809 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429745 4809 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429757 4809 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429770 4809 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429783 4809 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429794 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429806 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429817 4809 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429829 4809 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429840 4809 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429851 4809 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429862 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429873 4809 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429884 4809 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429896 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429906 4809 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429917 4809 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429929 4809 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429940 4809 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429951 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429962 4809 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429973 4809 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429984 4809 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.429995 4809 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430006 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430017 4809 feature_gate.go:330] unrecognized feature 
gate: ConsolePluginContentSecurityPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430028 4809 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430040 4809 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430052 4809 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430063 4809 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430074 4809 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430085 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430100 4809 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430115 4809 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430128 4809 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430140 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430155 4809 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430166 4809 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430177 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430189 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430202 4809 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430213 4809 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430226 4809 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430237 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430249 4809 feature_gate.go:330] unrecognized feature gate: Example Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430263 4809 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430276 4809 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430289 4809 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430302 4809 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430314 4809 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430325 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 
00:09:09.430339 4809 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430351 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430362 4809 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430374 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430384 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430396 4809 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430408 4809 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430419 4809 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430430 4809 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430440 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.430452 4809 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.430528 4809 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.444215 4809 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.444300 4809 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444497 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444532 4809 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444543 4809 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444558 4809 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444572 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444581 4809 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444590 4809 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444599 4809 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444607 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444615 4809 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444623 4809 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444632 4809 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444677 4809 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444688 4809 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444698 4809 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444708 4809 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444719 4809 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444728 4809 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444735 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444743 4809 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444752 4809 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444760 4809 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444769 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444780 4809 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444791 4809 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444801 4809 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444810 4809 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444819 4809 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444827 4809 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444836 4809 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444844 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444852 4809 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444862 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444870 4809 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444908 4809 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444920 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444930 4809 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444942 4809 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444952 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444964 4809 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444974 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444985 4809 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.444994 4809 feature_gate.go:330] unrecognized feature gate: Example Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445002 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445010 4809 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445018 4809 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445026 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445034 4809 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445043 4809 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445050 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445059 4809 
feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445066 4809 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445074 4809 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445082 4809 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445090 4809 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445099 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445107 4809 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445114 4809 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445122 4809 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445130 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445138 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445145 4809 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445156 4809 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445165 4809 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445173 4809 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445181 4809 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445189 4809 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445196 4809 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445204 4809 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445211 4809 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445223 4809 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.445238 4809 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445509 4809 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 
00:09:09.445524 4809 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445536 4809 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445544 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445554 4809 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445562 4809 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445570 4809 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445578 4809 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445588 4809 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445597 4809 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445605 4809 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445615 4809 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445625 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445634 4809 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445677 4809 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445686 4809 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445694 4809 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445703 4809 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445710 4809 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445718 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445726 4809 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445734 4809 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445741 4809 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445749 4809 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445756 4809 feature_gate.go:330] unrecognized feature gate: Example Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445767 4809 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445777 4809 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445785 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445794 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445804 4809 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445815 4809 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445823 4809 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445831 4809 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445840 4809 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445850 4809 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445859 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445867 4809 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445875 4809 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445883 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445892 4809 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445900 4809 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445908 4809 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445916 4809 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445924 4809 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445932 4809 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445940 4809 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445947 4809 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445955 4809 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445963 4809 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445971 4809 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445978 4809 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445986 
4809 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.445994 4809 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446001 4809 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446009 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446016 4809 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446024 4809 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446032 4809 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446039 4809 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446047 4809 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446054 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446062 4809 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446070 4809 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446079 4809 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446088 4809 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446096 4809 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446104 4809 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446111 4809 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446119 4809 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446126 4809 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.446136 4809 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.446150 4809 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.446501 4809 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.457164 4809 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.457307 4809 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.458966 4809 server.go:997] "Starting client certificate rotation" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.459010 4809 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.460896 4809 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-03 06:44:52.028030364 +0000 UTC Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.461021 4809 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1542h35m42.56701502s for next certificate rotation Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.485736 4809 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.488853 4809 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.512487 4809 log.go:25] "Validated CRI v1 runtime API" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.556248 4809 log.go:25] "Validated CRI v1 image API" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.558528 4809 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.565341 4809 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-00-04-34-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.565397 4809 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.599399 4809 manager.go:217] Machine: {Timestamp:2025-09-30 00:09:09.592802822 +0000 UTC m=+0.629052320 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:73ed9907-d828-4256-9134-2bd904afec40 BootID:d9b712b9-d1e8-41f4-ba32-d2073fd76ca6 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 
DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d8:73:69 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:d8:73:69 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:2a:27:97 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:3b:98:b9 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:8b:fc:68 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:df:e9:4a Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:35:c6:6f Speed:-1 Mtu:1496} {Name:eth10 MacAddress:7a:4e:49:d0:a3:48 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:22:a7:5b:a9:33:86 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.600309 4809 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.600605 4809 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.601900 4809 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.602317 4809 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.602390 4809 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.602948 4809 topology_manager.go:138] "Creating topology manager with none policy" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.602975 4809 container_manager_linux.go:303] "Creating device plugin manager" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.603992 4809 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.604066 4809 server.go:66] "Creating device plugin registration server" 
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.605084 4809 state_mem.go:36] "Initialized new in-memory state store" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.605272 4809 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.610082 4809 kubelet.go:418] "Attempting to sync node with API server" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.610139 4809 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.610206 4809 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.610236 4809 kubelet.go:324] "Adding apiserver pod source" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.610272 4809 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.615922 4809 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.617121 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.617165 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.617611 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.617397 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.617824 4809 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.620528 4809 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622555 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622608 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622622 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622634 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622699 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622712 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622724 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622743 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622755 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622765 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622800 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.622810 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.625177 4809 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.627437 4809 server.go:1280] "Started kubelet" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.628308 4809 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:09 crc systemd[1]: Started Kubernetes Kubelet. 
Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.630483 4809 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.631108 4809 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.631981 4809 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633001 4809 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633167 4809 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633248 4809 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 07:10:32.777269599 +0000 UTC Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633314 4809 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2215h1m23.14396136s for next certificate rotation Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633440 4809 volume_manager.go:287] "The desired_state_of_world populator starts" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633462 4809 volume_manager.go:289] "Starting Kubelet Volume Manager" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.633667 4809 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.634176 4809 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.634530 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.634619 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.634946 4809 factory.go:55] Registering systemd factory Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.635064 4809 factory.go:221] Registration of the systemd container factory successfully Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.642992 4809 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.24:6443: connect: connection refused" interval="200ms" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.643073 4809 server.go:460] "Adding debug handlers to kubelet server" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.643494 4809 factory.go:153] Registering CRI-O factory Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.644012 4809 factory.go:221] Registration of the crio container factory successfully Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.644225 4809 factory.go:219] Registration 
of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.644381 4809 factory.go:103] Registering Raw factory Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.644495 4809 manager.go:1196] Started watching for new ooms in manager Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.647319 4809 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.24:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869e6d91c9f94a1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 00:09:09.627401377 +0000 UTC m=+0.663650795,LastTimestamp:2025-09-30 00:09:09.627401377 +0000 UTC m=+0.663650795,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.650417 4809 manager.go:319] Starting recovery of all containers Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655121 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655205 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655231 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655254 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655277 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655299 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655320 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655343 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655368 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655389 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655411 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655433 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655461 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655489 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655513 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655536 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655563 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655590 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655616 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655673 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655701 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655726 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655754 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655785 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655810 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655838 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655903 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655934 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.655964 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.658914 4809 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.658985 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659017 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659036 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659055 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659073 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659089 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659106 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659120 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659132 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659144 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659160 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659177 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659189 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659201 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659223 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659237 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659252 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659264 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659278 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659295 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659307 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659321 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659337 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659360 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659375 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659388 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659402 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659419 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659437 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659455 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659473 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659493 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659510 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659527 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659543 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659579 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659596 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659627 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659665 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659683 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659701 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659719 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659735 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659753 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659772 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659788 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659807 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659824 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659840 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659859 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659876 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659893 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659909 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659925 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659944 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659959 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659976 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.659991 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660008 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660025 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660042 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660058 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660075 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660101 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660118 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660137 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660152 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660168 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660186 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660202 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660219 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660236 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660252 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660270 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660288 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660316 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660335 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660355 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660383 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660402 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660422 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660440 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660459 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660475 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660493 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660514 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660532 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660549 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660569 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660598 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660622 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660698 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660727 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660772 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660793 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660810 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660826 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660844 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660862 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660880 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660898 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660943 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660960 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.660980 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661016 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661033 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661083 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661101 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661122 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661138 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661173 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661190 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661206 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661223 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661239 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661255 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661274 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661290 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661327 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661339 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661349 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661360 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661372 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661415 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661428 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661439 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661470 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661482 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661493 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661527 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661539 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661551 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661564 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661590 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661666 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661684 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661718 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661771 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661784 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661795 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661809 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661821 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661859 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661870 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661881 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661892 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661902 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661933 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661944 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.661993 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662074 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662088 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662130 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662143 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662155 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662167 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662178 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662190 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662222 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662234 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662246 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662332 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662375 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662387 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662399 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662428 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662461 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662473 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662538 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662551 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662563 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662575 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662692 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662709 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662743 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662755 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662767 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662789 4809 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662813 4809 reconstruct.go:97] "Volume reconstruction finished" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.662824 4809 reconciler.go:26] "Reconciler: start to sync state" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.676139 4809 manager.go:324] Recovery completed Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.684316 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.684635 4809 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.685953 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.685985 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.685993 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.686745 4809 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.686780 4809 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.686814 4809 state_mem.go:36] "Initialized new in-memory state store" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.689462 4809 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.689534 4809 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.689572 4809 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.689685 4809 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 00:09:09 crc kubenswrapper[4809]: W0930 00:09:09.690322 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.690360 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.708190 4809 policy_none.go:49] "None policy: Start" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.708963 4809 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.708995 4809 state_mem.go:35] "Initializing new in-memory state store" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.735195 4809 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.763821 4809 manager.go:334] "Starting Device Plugin manager" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.763884 4809 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.763898 4809 server.go:79] "Starting device plugin registration server" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.764370 4809 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.764392 4809 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.764598 4809 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.764694 4809 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.764707 4809 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.770926 4809 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.790190 4809 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Sep 30 00:09:09 crc kubenswrapper[4809]: 
I0930 00:09:09.790341 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.792002 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.792058 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.792077 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.792254 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.792475 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.792541 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.793369 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.793413 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.793425 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.793607 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.793701 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.793737 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794627 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794666 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794690 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794696 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794703 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794718 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.794892 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795052 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795090 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795836 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795876 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795895 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795954 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795975 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.795984 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.796063 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.796119 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.796216 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.796119 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.796251 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.796256 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797102 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797106 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797128 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797143 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797155 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797173 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797447 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.797488 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.798069 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.798097 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.798109 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.844827 4809 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.24:6443: connect: connection refused" interval="400ms" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.865174 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.866879 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.866950 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.866982 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867016 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867169 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867244 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc 
kubenswrapper[4809]: I0930 00:09:09.867278 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867308 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867332 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867361 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867409 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867461 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867503 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867542 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867562 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867209 4809 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867792 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867887 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.867982 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:09 crc kubenswrapper[4809]: E0930 00:09:09.868514 4809 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.24:6443: connect: connection refused" node="crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968696 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968748 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968771 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968787 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968806 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968821 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968838 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968851 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968936 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968965 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969012 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968990 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968959 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969041 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969073 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.968869 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969056 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969116 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969133 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969169 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969199 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969206 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969215 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969260 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969234 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969312 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969364 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:09 crc 
kubenswrapper[4809]: I0930 00:09:09.969387 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969357 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 00:09:09 crc kubenswrapper[4809]: I0930 00:09:09.969484 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.069000 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.072104 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.072204 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.072241 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.072337 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.073020 4809 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.24:6443: connect: connection refused" node="crc" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.127844 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.136392 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.150152 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.173700 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.180169 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c485b6696e55319b623fc6ed02c97c275b258b8088f4e8b1b6cee1ed689a3118 WatchSource:0}: Error finding container c485b6696e55319b623fc6ed02c97c275b258b8088f4e8b1b6cee1ed689a3118: Status 404 returned error can't find the container with id c485b6696e55319b623fc6ed02c97c275b258b8088f4e8b1b6cee1ed689a3118 Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.182487 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.182820 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-11024ac503f46f2e73c6cc5128203602a6761e1272bd705d3b18c9b396d5c336 WatchSource:0}: Error finding container 11024ac503f46f2e73c6cc5128203602a6761e1272bd705d3b18c9b396d5c336: Status 404 returned error can't find the container with id 11024ac503f46f2e73c6cc5128203602a6761e1272bd705d3b18c9b396d5c336 Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.184554 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-69fa6545d048709f19e9dbacbfe760346d4d677fd510af46225e336ea71739a1 WatchSource:0}: Error finding container 69fa6545d048709f19e9dbacbfe760346d4d677fd510af46225e336ea71739a1: Status 404 returned error can't find the container with id 69fa6545d048709f19e9dbacbfe760346d4d677fd510af46225e336ea71739a1 Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.246007 4809 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.24:6443: connect: connection refused" interval="800ms" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.473702 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.475253 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.475488 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.475503 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.475538 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.475969 4809 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.24:6443: connect: connection refused" node="crc" Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.579579 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 
38.129.56.24:6443: connect: connection refused Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.579716 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.611028 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.611126 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.629234 4809 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.693312 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.693397 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.694860 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"69fa6545d048709f19e9dbacbfe760346d4d677fd510af46225e336ea71739a1"} Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.695903 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"11024ac503f46f2e73c6cc5128203602a6761e1272bd705d3b18c9b396d5c336"} Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.696787 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c485b6696e55319b623fc6ed02c97c275b258b8088f4e8b1b6cee1ed689a3118"} Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 00:09:10.697784 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1bf1ceb0eab89f2cbf371e5e7b0271931562a1177dc34b561aa43e634c1f30c2"} Sep 30 00:09:10 crc kubenswrapper[4809]: I0930 
00:09:10.698464 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9572e79c06705339fd528518417214bcd25929b99d769546805c25717547a1dd"} Sep 30 00:09:10 crc kubenswrapper[4809]: W0930 00:09:10.868449 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:10 crc kubenswrapper[4809]: E0930 00:09:10.868551 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:11 crc kubenswrapper[4809]: E0930 00:09:11.046959 4809 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.24:6443: connect: connection refused" interval="1.6s" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.276549 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.278764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.278837 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.278859 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.278898 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:11 crc kubenswrapper[4809]: E0930 00:09:11.279547 4809 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.24:6443: connect: connection refused" node="crc" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.629926 4809 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:11 crc kubenswrapper[4809]: E0930 00:09:11.702006 4809 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.24:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869e6d91c9f94a1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 00:09:09.627401377 +0000 UTC m=+0.663650795,LastTimestamp:2025-09-30 00:09:09.627401377 +0000 UTC m=+0.663650795,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 
UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.705114 4809 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0" exitCode=0 Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.705233 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.705378 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.706797 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.706844 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.706864 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.707095 4809 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2601098d0af38dd0a4b31569e92bbb352ccb63b452c2ee8b91d20485ca96b4b1" exitCode=0 Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.707174 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2601098d0af38dd0a4b31569e92bbb352ccb63b452c2ee8b91d20485ca96b4b1"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.707311 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.709636 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.709723 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.709741 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.710747 4809 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb" exitCode=0 Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.710803 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.710907 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.711975 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.711997 4809 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.712050 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.713217 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.713919 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.713954 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.713963 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.714858 4809 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9" exitCode=0 Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.714947 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.715022 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.716201 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.716229 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.716238 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.718699 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.718739 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.718758 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f"} Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.718771 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554"} Sep 30 
00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.718787 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.719876 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.719931 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:11 crc kubenswrapper[4809]: I0930 00:09:11.719950 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:12 crc kubenswrapper[4809]: W0930 00:09:12.181739 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:12 crc kubenswrapper[4809]: E0930 00:09:12.181860 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.629975 4809 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:12 crc kubenswrapper[4809]: E0930 00:09:12.648030 4809 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.24:6443: connect: connection refused" interval="3.2s" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.723925 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.723972 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.723981 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.723993 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.726032 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.726058 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.726068 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.726110 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.727044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.727087 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.727101 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.728248 4809 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa" exitCode=0 Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.728291 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.728341 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.729146 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.729169 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.729178 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.731841 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.731841 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.731830 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"92d685d32117afda939eef5a1bb4c114704d0036cb43ce2fc5cddd9ae0396960"} Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.734407 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 
00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.734436 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.734447 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.736329 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.736422 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.736435 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.879931 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.881680 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.881717 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.881726 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:12 crc kubenswrapper[4809]: I0930 00:09:12.881750 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:12 crc kubenswrapper[4809]: E0930 00:09:12.882166 4809 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.24:6443: connect: connection refused" node="crc" Sep 30 00:09:13 crc kubenswrapper[4809]: W0930 00:09:13.129362 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.24:6443: connect: connection refused Sep 30 00:09:13 crc kubenswrapper[4809]: E0930 00:09:13.129511 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.24:6443: connect: connection refused" logger="UnhandledError" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.739796 4809 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75" exitCode=0 Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.739890 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75"} Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.740007 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.742415 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 
00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.742462 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.742485 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.747132 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c"} Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.747223 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.747242 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.747184 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.747303 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749173 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749225 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749250 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749351 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749404 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749422 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749822 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749916 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:13 crc kubenswrapper[4809]: I0930 00:09:13.749968 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.753632 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c"} Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.753705 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7"} Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.753722 4809 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d"} Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.753724 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.753751 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.753869 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.754876 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.754911 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.754920 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.754944 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.754978 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:14 crc kubenswrapper[4809]: I0930 00:09:14.754999 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.763170 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b"} Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.763231 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.763273 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.763255 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118"} Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.766365 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.766702 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.766737 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.766652 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.766777 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Sep 30 00:09:15 crc kubenswrapper[4809]: I0930 00:09:15.766821 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.083332 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.085025 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.085085 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.085109 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.085153 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.766348 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.767803 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.767843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:16 crc kubenswrapper[4809]: I0930 00:09:16.767859 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.156847 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.157138 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.159843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.160747 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.160854 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.387905 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.388115 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.389430 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.389488 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:17 crc kubenswrapper[4809]: I0930 00:09:17.389506 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.041465 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.041677 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.043445 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.043492 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.043505 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.048933 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.393781 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.393997 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.395464 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.395534 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.395557 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.771358 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.772915 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.772965 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.772983 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.986291 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.986591 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.988142 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.988175 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:18 crc kubenswrapper[4809]: I0930 00:09:18.988186 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:19 crc kubenswrapper[4809]: I0930 00:09:19.072292 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 30 00:09:19 crc 
kubenswrapper[4809]: E0930 00:09:19.771323 4809 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 00:09:19 crc kubenswrapper[4809]: I0930 00:09:19.773308 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:19 crc kubenswrapper[4809]: I0930 00:09:19.774679 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:19 crc kubenswrapper[4809]: I0930 00:09:19.774741 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:19 crc kubenswrapper[4809]: I0930 00:09:19.774763 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:20 crc kubenswrapper[4809]: I0930 00:09:20.861744 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:20 crc kubenswrapper[4809]: I0930 00:09:20.861921 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:20 crc kubenswrapper[4809]: I0930 00:09:20.864808 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:20 crc kubenswrapper[4809]: I0930 00:09:20.864875 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:20 crc kubenswrapper[4809]: I0930 00:09:20.864886 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:20 crc kubenswrapper[4809]: I0930 00:09:20.869191 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:21 crc kubenswrapper[4809]: I0930 00:09:21.475545 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:21 crc kubenswrapper[4809]: I0930 00:09:21.778956 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:21 crc kubenswrapper[4809]: I0930 00:09:21.780218 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:21 crc kubenswrapper[4809]: I0930 00:09:21.780269 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:21 crc kubenswrapper[4809]: I0930 00:09:21.780287 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:22 crc kubenswrapper[4809]: I0930 00:09:22.782201 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:22 crc kubenswrapper[4809]: I0930 00:09:22.783515 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:22 crc kubenswrapper[4809]: I0930 00:09:22.783576 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:22 crc kubenswrapper[4809]: I0930 00:09:22.783600 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:23 crc kubenswrapper[4809]: 
I0930 00:09:23.354033 4809 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37466->192.168.126.11:17697: read: connection reset by peer" start-of-body= Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.354147 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37466->192.168.126.11:17697: read: connection reset by peer" Sep 30 00:09:23 crc kubenswrapper[4809]: W0930 00:09:23.360326 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.360453 4809 trace.go:236] Trace[1833922410]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 00:09:13.358) (total time: 10001ms): Sep 30 00:09:23 crc kubenswrapper[4809]: Trace[1833922410]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:09:23.360) Sep 30 00:09:23 crc kubenswrapper[4809]: Trace[1833922410]: [10.00191669s] [10.00191669s] END Sep 30 00:09:23 crc kubenswrapper[4809]: E0930 00:09:23.360488 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.630344 4809 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 30 00:09:23 crc kubenswrapper[4809]: W0930 00:09:23.633598 4809 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.633727 4809 trace.go:236] Trace[228621432]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 00:09:13.631) (total time: 10001ms): Sep 30 00:09:23 crc kubenswrapper[4809]: Trace[228621432]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:09:23.633) Sep 30 00:09:23 crc kubenswrapper[4809]: Trace[228621432]: [10.001873388s] [10.001873388s] END Sep 30 00:09:23 crc kubenswrapper[4809]: E0930 00:09:23.633751 4809 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" 
logger="UnhandledError" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.786484 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.788960 4809 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c" exitCode=255 Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.789009 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c"} Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.789144 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.790111 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.790147 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.790156 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:23 crc kubenswrapper[4809]: I0930 00:09:23.790694 4809 scope.go:117] "RemoveContainer" containerID="3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.475579 4809 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.475715 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.549221 4809 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.549295 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 30 00:09:24 crc kubenswrapper[4809]: 
I0930 00:09:24.564208 4809 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]log ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]etcd ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/openshift.io-api-request-count-filter ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/openshift.io-startkubeinformers ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/generic-apiserver-start-informers ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/priority-and-fairness-config-consumer ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/priority-and-fairness-filter ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-apiextensions-informers ok Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/crd-informer-synced failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-system-namespaces-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-cluster-authentication-info-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-legacy-token-tracking-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-service-ip-repair-controllers ok Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/priority-and-fairness-config-producer failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/bootstrap-controller failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/start-kube-aggregator-informers ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/apiservice-status-local-available-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/apiservice-status-remote-available-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/apiservice-registration-controller failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/apiservice-wait-for-first-sync ok Sep 30 00:09:24 crc kubenswrapper[4809]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/kube-apiserver-autoregistration ok Sep 30 00:09:24 crc 
kubenswrapper[4809]: [+]autoregister-completion ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/apiservice-openapi-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: [+]poststarthook/apiservice-openapiv3-controller ok Sep 30 00:09:24 crc kubenswrapper[4809]: livez check failed Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.564287 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.795145 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.797309 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864"} Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.797454 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.798280 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.798308 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:24 crc kubenswrapper[4809]: I0930 00:09:24.798320 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.396761 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.397001 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.397145 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.398461 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.398541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.398556 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.405526 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.805140 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.806561 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.806629 4809 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:27 crc kubenswrapper[4809]: I0930 00:09:27.806712 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:28 crc kubenswrapper[4809]: I0930 00:09:28.651591 4809 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 30 00:09:28 crc kubenswrapper[4809]: I0930 00:09:28.807857 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:28 crc kubenswrapper[4809]: I0930 00:09:28.809124 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:28 crc kubenswrapper[4809]: I0930 00:09:28.809197 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:28 crc kubenswrapper[4809]: I0930 00:09:28.809221 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.029442 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.029631 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.030820 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.030851 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.030865 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.048100 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.263294 4809 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.541840 4809 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.544285 4809 trace.go:236] Trace[620608322]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 00:09:17.424) (total time: 12119ms): Sep 30 00:09:29 crc kubenswrapper[4809]: Trace[620608322]: ---"Objects listed" error: 12119ms (00:09:29.544) Sep 30 00:09:29 crc kubenswrapper[4809]: Trace[620608322]: [12.119502375s] [12.119502375s] END Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.544321 4809 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.545772 4809 trace.go:236] Trace[1343624900]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 00:09:17.493) (total time: 12052ms): Sep 30 00:09:29 crc kubenswrapper[4809]: Trace[1343624900]: ---"Objects listed" error: 12052ms (00:09:29.545) Sep 30 00:09:29 crc kubenswrapper[4809]: Trace[1343624900]: [12.052354721s] 
[12.052354721s] END Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.545804 4809 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.548200 4809 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.548886 4809 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.624384 4809 apiserver.go:52] "Watching apiserver" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.627596 4809 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.627913 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.628304 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.628367 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.628478 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.628531 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.628601 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.628687 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.628811 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.629024 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.629084 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.630121 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.630518 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.630793 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.631278 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.631612 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.632698 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.632988 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.634180 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.634577 4809 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.634817 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.649191 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.649743 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.649914 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.650186 4809 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651331 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651575 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.649699 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651790 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.650058 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.650141 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651168 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651878 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651900 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651893 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651930 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651960 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.651989 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652015 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652036 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652059 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652082 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652106 4809 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652172 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652195 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652192 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652219 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652269 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652291 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652312 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652333 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652354 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652375 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652395 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652416 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652439 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652462 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652510 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652531 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652550 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652573 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652594 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652613 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652702 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652724 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652745 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652768 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652791 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652813 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652840 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652869 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652895 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652923 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652948 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652972 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652991 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653042 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653062 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653081 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653115 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653135 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653160 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653185 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653209 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653230 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653252 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653272 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653294 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653319 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653339 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653361 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653384 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: 
\"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653407 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653427 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653448 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653469 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653489 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653513 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653542 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653565 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653588 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 
00:09:29.653613 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653655 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653684 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653707 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653727 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653750 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653771 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653790 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653812 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653833 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: 
\"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653854 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653894 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653919 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653944 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653968 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653993 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654015 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654036 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654057 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654077 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: 
\"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654099 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654121 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654142 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654165 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654188 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654213 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654237 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654262 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654287 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654314 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654337 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654361 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654391 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654415 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654441 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654463 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654485 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654509 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654533 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654556 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654579 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654602 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654623 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654667 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654697 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654723 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654746 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654768 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654792 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654828 4809 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654853 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654875 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654901 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654924 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654950 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654979 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655002 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655024 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655047 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655089 4809 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655111 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655136 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655160 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655185 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655208 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655231 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655255 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655280 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655304 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 00:09:29 crc 
kubenswrapper[4809]: I0930 00:09:29.655330 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655356 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655456 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655483 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655508 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655531 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655557 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655580 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655603 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655627 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " 
Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655675 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655700 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655732 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655757 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655792 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655820 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655847 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655871 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655894 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655916 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 00:09:29 crc 
kubenswrapper[4809]: I0930 00:09:29.655939 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655960 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655984 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656006 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656032 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656059 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656083 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656110 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656136 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656160 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod 
\"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656185 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656208 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656232 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656254 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656278 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656300 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656325 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656348 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656372 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656394 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656419 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656445 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656472 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656496 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656519 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656544 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656567 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656591 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656617 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656660 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656685 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656712 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652391 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656739 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656790 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652427 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652547 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652754 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652836 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652890 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.652906 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653122 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653204 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653211 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653503 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657115 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657528 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657659 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657695 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657721 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656822 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657813 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657821 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.657847 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658093 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658130 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658151 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658172 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658201 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658225 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658245 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658263 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658287 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658319 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658388 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658406 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658423 4809 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658435 4809 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658448 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658463 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658477 4809 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658490 4809 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658504 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658517 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658533 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658546 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658560 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658573 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658587 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658600 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658615 4809 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.659224 4809 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.659765 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658286 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658595 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653555 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653594 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653847 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653849 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653895 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654164 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654171 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654209 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654397 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654672 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654779 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.654893 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655289 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655580 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655686 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.655994 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656047 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656060 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656391 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656452 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656547 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.656559 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.658926 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.659264 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.659474 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.662146 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:30.162105195 +0000 UTC m=+21.198354613 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.659513 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.662207 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:30.162197477 +0000 UTC m=+21.198446885 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.659531 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.653532 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.659819 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.660029 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.660297 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.660893 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.661171 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.661439 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.661461 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.664635 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.665620 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.665949 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.666281 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.666369 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.666411 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.666513 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.666868 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.667058 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.667566 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.667551 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.667700 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668090 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668218 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668239 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668422 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668604 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668636 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668892 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.668953 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669017 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669215 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669250 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669376 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669432 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669373 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669739 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.669478 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670065 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670089 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670139 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670368 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670402 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670738 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670748 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670795 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.670942 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671097 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671116 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671134 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671263 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671302 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671345 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671185 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671484 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671631 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671630 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671899 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671931 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.671975 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.672083 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.672346 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.672429 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.672499 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.672887 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.672994 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673017 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673168 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673129 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673365 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.673429 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:09:30.173407657 +0000 UTC m=+21.209657155 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673507 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673721 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673904 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.673726 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.674157 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.674439 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.674608 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.674717 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675263 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675478 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675648 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675709 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675878 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675930 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675989 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676169 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676202 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676357 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676563 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676689 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676702 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676968 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676994 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.676978 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.677289 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.677312 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.677322 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.677522 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.677835 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.678281 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.678389 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.678679 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.675568 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.679127 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.679436 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.679867 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.680824 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.681118 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.681389 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.681459 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.681785 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.682135 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.682242 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.682423 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.682654 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.683012 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.683944 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.683959 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.684041 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-09-30 00:09:30.184012558 +0000 UTC m=+21.220261966 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.684042 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.684038 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.683383 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.683427 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.683673 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.683722 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.683733 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.683014 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.684296 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.684517 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.684590 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.685197 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.685360 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.685514 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.685586 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.685579 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.686778 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.688336 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.690247 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.690678 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.694112 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.695318 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.695353 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.695369 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:29 crc kubenswrapper[4809]: E0930 00:09:29.695434 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:30.195408223 +0000 UTC m=+21.231657631 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.695870 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.696012 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.696190 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.696266 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.696518 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.700036 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.700534 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.705412 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.706467 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.707323 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.709190 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.710132 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.710699 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.711871 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.712029 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.712316 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.712648 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.713434 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.713546 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.713951 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.717171 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.717264 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.717331 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.718226 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.720167 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.721561 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.723339 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.724093 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.726448 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 
00:09:29.727406 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.728098 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.731703 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.732332 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.733403 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.734840 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.736586 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.737292 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.738625 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.739980 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.741655 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.742932 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.744886 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.747400 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.748703 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.748922 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.750379 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.751044 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.752267 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.752925 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.754343 4809 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.754491 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.756926 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.758203 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.758827 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.758974 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759251 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759364 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759450 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759506 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759516 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759526 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759536 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759548 4809 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759559 4809 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759571 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759581 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759590 4809 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759598 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759621 4809 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759652 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759653 4809 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759691 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759702 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759715 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759727 4809 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759739 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc 
kubenswrapper[4809]: I0930 00:09:29.759753 4809 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759766 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759778 4809 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759789 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759800 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759812 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759824 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759834 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759844 4809 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759857 4809 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759867 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759879 4809 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759891 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on 
node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759904 4809 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759915 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759938 4809 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759949 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.759961 4809 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760141 4809 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760753 4809 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760780 4809 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760792 4809 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760806 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760818 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760828 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760839 4809 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" 
DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760850 4809 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760860 4809 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760868 4809 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760876 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760886 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760894 4809 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760903 4809 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760938 4809 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.760952 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761258 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761285 4809 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761297 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761308 4809 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761334 4809 
reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761351 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761362 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761372 4809 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761382 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761392 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761402 4809 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761411 4809 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761422 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761431 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761442 4809 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761451 4809 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761460 4809 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761469 4809 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761479 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761479 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761489 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761524 4809 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761533 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761542 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761551 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761561 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761570 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761578 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761586 4809 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761596 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761606 4809 
reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761615 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761624 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761632 4809 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761771 4809 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761781 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761789 4809 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761798 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761807 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761817 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761826 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761835 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761845 4809 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761854 4809 reconciler_common.go:293] "Volume detached for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761864 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761876 4809 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761886 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761894 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761903 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761912 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761924 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761932 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761941 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761950 4809 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.761958 4809 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762156 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762165 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762173 4809 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762181 4809 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762190 4809 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762198 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762208 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762216 4809 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762224 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762235 4809 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762243 4809 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762251 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762260 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762270 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762278 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762286 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762296 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762304 4809 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762314 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762323 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762333 4809 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762341 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762396 4809 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762407 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762415 4809 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762424 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762433 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762442 4809 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762451 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762459 4809 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762468 4809 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762476 4809 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762484 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762493 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762501 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762512 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762520 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762529 4809 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762539 4809 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762548 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762558 4809 reconciler_common.go:293] "Volume detached for volume 
\"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762567 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762576 4809 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762584 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762594 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762603 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762613 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762623 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762631 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762652 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762660 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762668 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762677 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762686 4809 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762685 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762695 4809 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762730 4809 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762740 4809 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762750 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762758 4809 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762768 4809 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762779 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762787 4809 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762795 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762804 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762812 4809 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762821 4809 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762829 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762837 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762845 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762853 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.762862 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.763896 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.764735 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.766122 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.766834 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.768068 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.769402 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.770238 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.770953 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 
00:09:29.772331 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.773489 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.774613 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.775143 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.775322 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.777068 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.777789 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.779023 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.779804 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.780759 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.784169 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.793659 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.805044 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.818329 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.831254 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.831581 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.845363 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.856182 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.871517 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.946122 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 00:09:29 crc kubenswrapper[4809]: I0930 00:09:29.966056 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 00:09:29 crc kubenswrapper[4809]: W0930 00:09:29.984073 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-c2e16ed10f92ef16413ec9161724a56f1fe2921df052ddfb38ea5a941e040df7 WatchSource:0}: Error finding container c2e16ed10f92ef16413ec9161724a56f1fe2921df052ddfb38ea5a941e040df7: Status 404 returned error can't find the container with id c2e16ed10f92ef16413ec9161724a56f1fe2921df052ddfb38ea5a941e040df7 Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.010066 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 00:09:30 crc kubenswrapper[4809]: W0930 00:09:30.022367 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-ad42bab9f19814c34e544360e197093a9909f59bffc48394227ea03d728809ec WatchSource:0}: Error finding container ad42bab9f19814c34e544360e197093a9909f59bffc48394227ea03d728809ec: Status 404 returned error can't find the container with id ad42bab9f19814c34e544360e197093a9909f59bffc48394227ea03d728809ec Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.167857 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.167915 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.168039 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.168123 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.168141 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:31.16812123 +0000 UTC m=+22.204370638 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.168288 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:31.168269914 +0000 UTC m=+22.204519322 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.268630 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.268788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.268866 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:09:31.268832888 +0000 UTC m=+22.305082306 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.268929 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269014 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269034 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269048 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269124 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr 
podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:31.269108476 +0000 UTC m=+22.305357904 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269142 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269160 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269174 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.269219 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:31.269205119 +0000 UTC m=+22.305454617 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.818778 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.818849 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.818863 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c2e16ed10f92ef16413ec9161724a56f1fe2921df052ddfb38ea5a941e040df7"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.820800 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b99b40064f1f5dcc6c54988c4c8b529d668ea9d730c1e208816eed6328564150"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.822890 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.823569 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.825735 4809 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864" exitCode=255 Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.825765 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.825918 4809 scope.go:117] "RemoveContainer" containerID="3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.828217 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.828255 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ad42bab9f19814c34e544360e197093a9909f59bffc48394227ea03d728809ec"} Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.871051 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:30Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.878682 4809 scope.go:117] "RemoveContainer" containerID="1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864" Sep 30 00:09:30 crc kubenswrapper[4809]: E0930 00:09:30.878861 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.879246 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 
00:09:30.893477 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:30Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.921711 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:30Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.956033 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:30Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:30 crc kubenswrapper[4809]: I0930 00:09:30.975805 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:30Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.009381 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.029768 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.063117 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:23Z\\\",\\\"message\\\":\\\"W0930 00:09:12.856101 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 00:09:12.856413 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759190952 cert, and key in /tmp/serving-cert-2955562664/serving-signer.crt, /tmp/serving-cert-2955562664/serving-signer.key\\\\nI0930 00:09:13.091915 1 observer_polling.go:159] Starting file observer\\\\nW0930 00:09:13.094742 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 00:09:13.094977 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:13.096935 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2955562664/tls.crt::/tmp/serving-cert-2955562664/tls.key\\\\\\\"\\\\nF0930 00:09:23.346049 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 
dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.085455 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.089303 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-v5x64"] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.089675 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.090969 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.091776 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.091913 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.100121 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.127308 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.142265 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.157810 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.174134 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.175357 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj5gf\" (UniqueName: \"kubernetes.io/projected/ae2cb8c5-974c-4952-920a-9f194e953d8b-kube-api-access-kj5gf\") pod \"node-resolver-v5x64\" (UID: \"ae2cb8c5-974c-4952-920a-9f194e953d8b\") " pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.175430 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.175450 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.175478 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ae2cb8c5-974c-4952-920a-9f194e953d8b-hosts-file\") pod \"node-resolver-v5x64\" (UID: \"ae2cb8c5-974c-4952-920a-9f194e953d8b\") " pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.175563 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.175674 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:33.175652156 +0000 UTC m=+24.211901744 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.175673 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.175741 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:33.175723638 +0000 UTC m=+24.211973226 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.192155 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.211948 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:23Z\\\",\\\"message\\\":\\\"W0930 00:09:12.856101 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
00:09:12.856413 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759190952 cert, and key in /tmp/serving-cert-2955562664/serving-signer.crt, /tmp/serving-cert-2955562664/serving-signer.key\\\\nI0930 00:09:13.091915 1 observer_polling.go:159] Starting file observer\\\\nW0930 00:09:13.094742 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 00:09:13.094977 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:13.096935 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2955562664/tls.crt::/tmp/serving-cert-2955562664/tls.key\\\\\\\"\\\\nF0930 00:09:23.346049 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.226149 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.242467 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.253051 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.271384 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.275872 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.275945 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.275993 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ae2cb8c5-974c-4952-920a-9f194e953d8b-hosts-file\") pod \"node-resolver-v5x64\" (UID: \"ae2cb8c5-974c-4952-920a-9f194e953d8b\") " pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276014 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:09:33.275996774 +0000 UTC m=+24.312246192 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.276041 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.276068 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ae2cb8c5-974c-4952-920a-9f194e953d8b-hosts-file\") pod \"node-resolver-v5x64\" (UID: \"ae2cb8c5-974c-4952-920a-9f194e953d8b\") " pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.276071 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj5gf\" (UniqueName: \"kubernetes.io/projected/ae2cb8c5-974c-4952-920a-9f194e953d8b-kube-api-access-kj5gf\") pod \"node-resolver-v5x64\" (UID: \"ae2cb8c5-974c-4952-920a-9f194e953d8b\") " pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276214 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276265 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276281 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276229 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276409 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276425 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276377 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl 
podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:33.276355345 +0000 UTC m=+24.312604883 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.276498 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:33.276477028 +0000 UTC m=+24.312726426 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.296503 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.303294 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj5gf\" (UniqueName: \"kubernetes.io/projected/ae2cb8c5-974c-4952-920a-9f194e953d8b-kube-api-access-kj5gf\") pod \"node-resolver-v5x64\" (UID: \"ae2cb8c5-974c-4952-920a-9f194e953d8b\") " pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.319918 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.360455 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.383350 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.404521 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-v5x64" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.481002 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.490155 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.502154 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.505628 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2zlhx"] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.505974 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.508259 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.508287 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.508620 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.512685 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.514131 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.523727 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.538613 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.542686 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.555267 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.564264 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 
00:09:31.577696 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/526944fa-517b-47ad-abf1-75683c7f70a1-rootfs\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.577731 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/526944fa-517b-47ad-abf1-75683c7f70a1-mcd-auth-proxy-config\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.577748 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn7xh\" (UniqueName: \"kubernetes.io/projected/526944fa-517b-47ad-abf1-75683c7f70a1-kube-api-access-mn7xh\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.577777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/526944fa-517b-47ad-abf1-75683c7f70a1-proxy-tls\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.593660 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.609325 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.623576 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:23Z\\\",\\\"message\\\":\\\"W0930 00:09:12.856101 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
00:09:12.856413 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759190952 cert, and key in /tmp/serving-cert-2955562664/serving-signer.crt, /tmp/serving-cert-2955562664/serving-signer.key\\\\nI0930 00:09:13.091915 1 observer_polling.go:159] Starting file observer\\\\nW0930 00:09:13.094742 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 00:09:13.094977 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:13.096935 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2955562664/tls.crt::/tmp/serving-cert-2955562664/tls.key\\\\\\\"\\\\nF0930 00:09:23.346049 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.640810 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.655376 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.675748 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5ad40a7e4ecbd2d0778e9f1066819f51e7611409ffc816d48d93efebcc125c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:23Z\\\",\\\"message\\\":\\\"W0930 00:09:12.856101 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
00:09:12.856413 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759190952 cert, and key in /tmp/serving-cert-2955562664/serving-signer.crt, /tmp/serving-cert-2955562664/serving-signer.key\\\\nI0930 00:09:13.091915 1 observer_polling.go:159] Starting file observer\\\\nW0930 00:09:13.094742 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 00:09:13.094977 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:13.096935 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2955562664/tls.crt::/tmp/serving-cert-2955562664/tls.key\\\\\\\"\\\\nF0930 00:09:23.346049 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.678956 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/526944fa-517b-47ad-abf1-75683c7f70a1-rootfs\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.678997 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/526944fa-517b-47ad-abf1-75683c7f70a1-mcd-auth-proxy-config\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.679014 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn7xh\" (UniqueName: \"kubernetes.io/projected/526944fa-517b-47ad-abf1-75683c7f70a1-kube-api-access-mn7xh\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.679043 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/526944fa-517b-47ad-abf1-75683c7f70a1-proxy-tls\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.679125 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/526944fa-517b-47ad-abf1-75683c7f70a1-rootfs\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.679855 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/526944fa-517b-47ad-abf1-75683c7f70a1-mcd-auth-proxy-config\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.683226 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/526944fa-517b-47ad-abf1-75683c7f70a1-proxy-tls\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.690118 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.690151 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.690132 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.690235 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.690335 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.690402 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.693372 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.694124 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.704681 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.714157 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn7xh\" (UniqueName: \"kubernetes.io/projected/526944fa-517b-47ad-abf1-75683c7f70a1-kube-api-access-mn7xh\") pod \"machine-config-daemon-2zlhx\" (UID: \"526944fa-517b-47ad-abf1-75683c7f70a1\") " pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.737240 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.793054 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.813727 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.817054 4809 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.834475 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-v5x64" event={"ID":"ae2cb8c5-974c-4952-920a-9f194e953d8b","Type":"ContainerStarted","Data":"043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d"} Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.834528 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-v5x64" event={"ID":"ae2cb8c5-974c-4952-920a-9f194e953d8b","Type":"ContainerStarted","Data":"35fc2e250a6fd6680e0c213f2e964acba4ce3b93c704beb1f98bedac20d52331"} Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.835187 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"0942aa822465ab35d4874e3448c66cb93fed1a2296fc50c06e66b3a1f6a5a7c5"} Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.837148 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.839706 4809 scope.go:117] "RemoveContainer" containerID="1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864" Sep 30 00:09:31 crc kubenswrapper[4809]: E0930 00:09:31.839830 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.854311 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.870242 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.884703 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.897157 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.907766 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-h6xqr"] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.908101 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.910433 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-p8k9l"] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.910874 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.911203 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.911379 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lkdqg"] Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.911587 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.911771 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.912540 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916115 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916132 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916155 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916238 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916253 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916357 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.916715 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.917051 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.918166 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.918441 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.918674 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.918868 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.941888 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.957011 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.972268 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.981901 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdrpr\" (UniqueName: \"kubernetes.io/projected/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-kube-api-access-kdrpr\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.981938 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-netns\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.981957 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-netns\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.981978 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-var-lib-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982001 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-bin\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982021 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-k8s-cni-cncf-io\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982043 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-conf-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982061 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-etc-kubernetes\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982077 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-env-overrides\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982091 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/efc7b2e1-7308-483a-9117-02e83c45a528-multus-daemon-config\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982120 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-systemd\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982152 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-etc-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982173 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-system-cni-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982193 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-cni-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982210 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-hostroot\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982369 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-os-release\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982464 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/efc7b2e1-7308-483a-9117-02e83c45a528-cni-binary-copy\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982545 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-ovn-kubernetes\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982586 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982626 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-os-release\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982668 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bb6d\" (UniqueName: \"kubernetes.io/projected/b5a18711-80f7-42d7-a6a9-04996c22c1f0-kube-api-access-8bb6d\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982702 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-netd\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982739 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-slash\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982769 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-ovn\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 
00:09:31.982801 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cni-binary-copy\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982835 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-script-lib\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982858 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982891 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-config\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982920 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-node-log\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982947 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-log-socket\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.982996 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983029 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovn-node-metrics-cert\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983053 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-cnibin\") pod \"multus-h6xqr\" (UID: 
\"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983078 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-socket-dir-parent\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983107 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-cni-bin\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983135 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-cni-multus\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983154 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cnibin\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983172 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-multus-certs\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983188 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-system-cni-dir\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983210 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-kubelet\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983224 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983245 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-systemd-units\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983263 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-kubelet\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.983278 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75c7d\" (UniqueName: \"kubernetes.io/projected/efc7b2e1-7308-483a-9117-02e83c45a528-kube-api-access-75c7d\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:31 crc kubenswrapper[4809]: I0930 00:09:31.993113 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\
\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:31Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.005442 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.028788 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.048724 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.061677 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.081124 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084450 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-slash\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084493 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-ovn\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084512 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cni-binary-copy\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084533 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-script-lib\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084550 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084576 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-config\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084581 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-slash\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084600 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-node-log\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084609 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-ovn\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084621 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-log-socket\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084684 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-log-socket\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084689 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-node-log\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084739 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084773 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084801 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-cni-multus\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084828 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovn-node-metrics-cert\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084831 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-cni-multus\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084845 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-cnibin\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084864 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-socket-dir-parent\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084879 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-cni-bin\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084896 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-multus-certs\") pod \"multus-h6xqr\" (UID: 
\"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084913 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-system-cni-dir\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084934 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cnibin\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084938 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-cnibin\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084955 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-kubelet\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084976 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084985 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-system-cni-dir\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084984 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-cni-bin\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085018 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-multus-certs\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085049 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-kubelet\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc 
kubenswrapper[4809]: I0930 00:09:32.085020 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-systemd-units\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.084998 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-systemd-units\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085069 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-socket-dir-parent\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085089 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-kubelet\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085110 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75c7d\" (UniqueName: \"kubernetes.io/projected/efc7b2e1-7308-483a-9117-02e83c45a528-kube-api-access-75c7d\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085155 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cnibin\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085173 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-var-lib-kubelet\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085228 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdrpr\" (UniqueName: \"kubernetes.io/projected/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-kube-api-access-kdrpr\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085243 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-netns\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085261 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" 
(UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-netns\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085312 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-var-lib-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085332 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-bin\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085334 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-netns\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085346 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-k8s-cni-cncf-io\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085361 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-conf-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085377 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-env-overrides\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085392 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/efc7b2e1-7308-483a-9117-02e83c45a528-multus-daemon-config\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085407 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-etc-kubernetes\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085432 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-systemd\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085443 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-host-run-k8s-cni-cncf-io\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085475 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-etc-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085486 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-netns\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085495 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-system-cni-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085514 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-cni-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085521 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-hostroot\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085538 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-config\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085562 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-script-lib\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085546 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-bin\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085585 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-tuning-conf-dir\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085550 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-ovn-kubernetes\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085573 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-ovn-kubernetes\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085519 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-var-lib-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085617 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-etc-kubernetes\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085630 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-os-release\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085574 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-systemd\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085683 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-system-cni-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085704 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-hostroot\") pod 
\"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085713 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-cni-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085712 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/efc7b2e1-7308-483a-9117-02e83c45a528-cni-binary-copy\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085720 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-multus-conf-dir\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085752 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-netd\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085791 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-etc-openvswitch\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085816 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085834 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-netd\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085873 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/efc7b2e1-7308-483a-9117-02e83c45a528-os-release\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085887 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-os-release\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 
00:09:32.085899 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.085943 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bb6d\" (UniqueName: \"kubernetes.io/projected/b5a18711-80f7-42d7-a6a9-04996c22c1f0-kube-api-access-8bb6d\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.086000 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-env-overrides\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.086008 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b5a18711-80f7-42d7-a6a9-04996c22c1f0-os-release\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.086082 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/efc7b2e1-7308-483a-9117-02e83c45a528-multus-daemon-config\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.086184 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/efc7b2e1-7308-483a-9117-02e83c45a528-cni-binary-copy\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.086532 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b5a18711-80f7-42d7-a6a9-04996c22c1f0-cni-binary-copy\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.088577 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovn-node-metrics-cert\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.100208 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.104166 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bb6d\" (UniqueName: \"kubernetes.io/projected/b5a18711-80f7-42d7-a6a9-04996c22c1f0-kube-api-access-8bb6d\") pod \"multus-additional-cni-plugins-p8k9l\" (UID: \"b5a18711-80f7-42d7-a6a9-04996c22c1f0\") " pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.105824 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdrpr\" (UniqueName: \"kubernetes.io/projected/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-kube-api-access-kdrpr\") pod \"ovnkube-node-lkdqg\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.106658 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75c7d\" (UniqueName: \"kubernetes.io/projected/efc7b2e1-7308-483a-9117-02e83c45a528-kube-api-access-75c7d\") pod \"multus-h6xqr\" (UID: \"efc7b2e1-7308-483a-9117-02e83c45a528\") " pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.115323 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.125893 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.137576 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.154354 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.231540 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-h6xqr" Sep 30 00:09:32 crc kubenswrapper[4809]: W0930 00:09:32.240021 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefc7b2e1_7308_483a_9117_02e83c45a528.slice/crio-ec6cebd04ea082d8bcbd3c757c07f3d54fd4fcd75dddbe14b488329f31381c4f WatchSource:0}: Error finding container ec6cebd04ea082d8bcbd3c757c07f3d54fd4fcd75dddbe14b488329f31381c4f: Status 404 returned error can't find the container with id ec6cebd04ea082d8bcbd3c757c07f3d54fd4fcd75dddbe14b488329f31381c4f Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.242832 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.249511 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:32 crc kubenswrapper[4809]: W0930 00:09:32.258661 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5a18711_80f7_42d7_a6a9_04996c22c1f0.slice/crio-da061d779c4c84670cb81665752d994d8caa7bfb057702a67a1082bf348928a3 WatchSource:0}: Error finding container da061d779c4c84670cb81665752d994d8caa7bfb057702a67a1082bf348928a3: Status 404 returned error can't find the container with id da061d779c4c84670cb81665752d994d8caa7bfb057702a67a1082bf348928a3 Sep 30 00:09:32 crc kubenswrapper[4809]: W0930 00:09:32.268800 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ec79e76_2a92_48e7_a55f_f8e630b00ed5.slice/crio-663df6a7998c7a81d329fbaaa888bbbdd7811e4068b0f467bed460e93317c1aa WatchSource:0}: Error finding container 663df6a7998c7a81d329fbaaa888bbbdd7811e4068b0f467bed460e93317c1aa: Status 404 returned error can't find the container with id 663df6a7998c7a81d329fbaaa888bbbdd7811e4068b0f467bed460e93317c1aa Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.844187 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.846505 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.846565 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.848809 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" exitCode=0 Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.848876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" 
event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.848930 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"663df6a7998c7a81d329fbaaa888bbbdd7811e4068b0f467bed460e93317c1aa"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.851982 4809 generic.go:334] "Generic (PLEG): container finished" podID="b5a18711-80f7-42d7-a6a9-04996c22c1f0" containerID="8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25" exitCode=0 Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.852088 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerDied","Data":"8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.852166 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerStarted","Data":"da061d779c4c84670cb81665752d994d8caa7bfb057702a67a1082bf348928a3"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.853541 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerStarted","Data":"a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.853586 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerStarted","Data":"ec6cebd04ea082d8bcbd3c757c07f3d54fd4fcd75dddbe14b488329f31381c4f"} Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.863834 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.894019 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.907997 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.927734 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.942312 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.962970 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.981510 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:32 crc kubenswrapper[4809]: I0930 00:09:32.996697 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:32Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.008266 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.021256 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.060873 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.087393 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.108500 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.123208 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.136832 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.154018 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.164419 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.185739 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b900922
72e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.198185 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.198292 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.198426 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.198546 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:37.198517891 +0000 UTC m=+28.234767489 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.198434 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.199054 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:37.199033567 +0000 UTC m=+28.235282975 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.204356 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.220543 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.235684 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.250125 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.265390 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.279341 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.292514 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.299738 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.299918 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:09:37.299883289 +0000 UTC m=+28.336132697 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.299985 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.300122 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300173 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300203 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300220 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300292 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:37.30026339 +0000 UTC m=+28.336512978 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300305 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300332 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300346 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.300545 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:37.300387574 +0000 UTC m=+28.336637162 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.311745 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.328231 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.347262 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.461174 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-pkt8x"] Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.461528 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.466676 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.466848 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.467374 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.468270 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.482624 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\
\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.503066 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b5bd29be-6d83-4a63-bf31-88e891913ba3-serviceca\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.503117 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7qdw\" (UniqueName: \"kubernetes.io/projected/b5bd29be-6d83-4a63-bf31-88e891913ba3-kube-api-access-g7qdw\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.503141 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b5bd29be-6d83-4a63-bf31-88e891913ba3-host\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 
30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.503659 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"r
eason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.517040 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.530787 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.542054 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.563818 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\
\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.576034 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.591414 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.602980 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.604298 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b5bd29be-6d83-4a63-bf31-88e891913ba3-host\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.604391 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b5bd29be-6d83-4a63-bf31-88e891913ba3-serviceca\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.604428 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7qdw\" (UniqueName: \"kubernetes.io/projected/b5bd29be-6d83-4a63-bf31-88e891913ba3-kube-api-access-g7qdw\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.604436 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b5bd29be-6d83-4a63-bf31-88e891913ba3-host\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 
crc kubenswrapper[4809]: I0930 00:09:33.609437 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b5bd29be-6d83-4a63-bf31-88e891913ba3-serviceca\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.619121 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.626612 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7qdw\" (UniqueName: \"kubernetes.io/projected/b5bd29be-6d83-4a63-bf31-88e891913ba3-kube-api-access-g7qdw\") pod \"node-ca-pkt8x\" (UID: \"b5bd29be-6d83-4a63-bf31-88e891913ba3\") " pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.631055 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.648062 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.659592 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.669957 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.688712 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.689813 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.689890 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.689813 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.689997 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.690055 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:33 crc kubenswrapper[4809]: E0930 00:09:33.689945 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.801551 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-pkt8x" Sep 30 00:09:33 crc kubenswrapper[4809]: W0930 00:09:33.812065 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5bd29be_6d83_4a63_bf31_88e891913ba3.slice/crio-8e6accd7106e5454bc7c1f805e69a427854af2e258e730ba3ed2d39ff9baa33b WatchSource:0}: Error finding container 8e6accd7106e5454bc7c1f805e69a427854af2e258e730ba3ed2d39ff9baa33b: Status 404 returned error can't find the container with id 8e6accd7106e5454bc7c1f805e69a427854af2e258e730ba3ed2d39ff9baa33b Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.860178 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.860229 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.860238 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.860252 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.860260 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.860268 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.862955 4809 generic.go:334] "Generic (PLEG): container finished" podID="b5a18711-80f7-42d7-a6a9-04996c22c1f0" containerID="0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0" exitCode=0 Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.863038 
4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerDied","Data":"0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.864012 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-pkt8x" event={"ID":"b5bd29be-6d83-4a63-bf31-88e891913ba3","Type":"ContainerStarted","Data":"8e6accd7106e5454bc7c1f805e69a427854af2e258e730ba3ed2d39ff9baa33b"} Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.881234 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cda
b3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.894509 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.905623 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.919145 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.930782 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.945036 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.956603 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.970573 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.983290 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:33 crc kubenswrapper[4809]: I0930 00:09:33.994190 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:33Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.006619 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.029318 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.066953 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.111171 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.156166 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.869234 4809 generic.go:334] "Generic (PLEG): container finished" podID="b5a18711-80f7-42d7-a6a9-04996c22c1f0" containerID="745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361" exitCode=0 Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.869300 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerDied","Data":"745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361"} Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.870387 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-pkt8x" event={"ID":"b5bd29be-6d83-4a63-bf31-88e891913ba3","Type":"ContainerStarted","Data":"c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968"} Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.883817 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.894072 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.905186 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17
7225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.924828 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731
4731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.938867 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.952499 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.964346 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.979081 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:34 crc kubenswrapper[4809]: I0930 00:09:34.989319 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:34Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.011061 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.026259 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.054374 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.083812 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.095351 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.112580 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z 
is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.125368 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.142586 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z 
is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.155853 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.170745 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.184537 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.194134 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17
7225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.210204 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731
4731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.221545 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.232351 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.244159 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.257842 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.267247 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 
2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.278865 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.306945 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.350905 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.690831 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.690885 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.690962 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:35 crc kubenswrapper[4809]: E0930 00:09:35.691048 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:35 crc kubenswrapper[4809]: E0930 00:09:35.691160 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:35 crc kubenswrapper[4809]: E0930 00:09:35.691263 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.876285 4809 generic.go:334] "Generic (PLEG): container finished" podID="b5a18711-80f7-42d7-a6a9-04996c22c1f0" containerID="ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c" exitCode=0 Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.876332 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerDied","Data":"ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c"} Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.888133 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.903208 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.915434 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.929322 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.945074 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.949968 4809 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.952015 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.952049 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.952058 4809 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.952195 4809 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.965694 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.972897 4809 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.973203 4809 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.974487 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.974523 4809 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.974531 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.974546 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.974556 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:35Z","lastTransitionTime":"2025-09-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:35 crc kubenswrapper[4809]: E0930 00:09:35.987415 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.990759 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:35Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.993479 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.993503 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.993512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.993528 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:35 crc kubenswrapper[4809]: I0930 00:09:35.993540 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:35Z","lastTransitionTime":"2025-09-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.003527 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: E0930 00:09:36.005262 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.008244 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.008293 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.008302 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.008315 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.008325 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.018087 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: E0930 00:09:36.023246 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.030446 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.030491 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.030505 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.030527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.030542 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.037696 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z 
is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: E0930 00:09:36.050577 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.056722 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.056780 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.056795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.056817 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.056833 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.060742 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.078215 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: E0930 00:09:36.078316 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: E0930 00:09:36.078478 4809 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.080466 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.080499 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.080510 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.080526 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.080535 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.094011 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.107841 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.120031 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.183858 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.183904 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.183914 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.183932 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.183945 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.286020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.286048 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.286057 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.286069 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.286078 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.388497 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.388554 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.388566 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.388582 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.388595 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.491145 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.491188 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.491199 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.491218 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.491233 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.534672 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.535823 4809 scope.go:117] "RemoveContainer" containerID="1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864" Sep 30 00:09:36 crc kubenswrapper[4809]: E0930 00:09:36.536035 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.594555 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.594603 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.594614 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.594633 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.594670 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.698445 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.698490 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.698503 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.698521 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.698535 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.800659 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.800709 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.800719 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.800750 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.800763 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.896811 4809 generic.go:334] "Generic (PLEG): container finished" podID="b5a18711-80f7-42d7-a6a9-04996c22c1f0" containerID="7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2" exitCode=0 Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.896880 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerDied","Data":"7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.902284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.902324 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.902341 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.902365 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.902381 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:36Z","lastTransitionTime":"2025-09-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.902440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.909060 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.923713 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.937957 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.956699 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.969773 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:36 crc kubenswrapper[4809]: I0930 00:09:36.993401 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:36Z 
is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.005137 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.005169 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.005178 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.005193 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.005203 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.006074 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.024336 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.037925 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.049142 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.062113 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.071248 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.081993 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.094806 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.107797 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.107839 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.107848 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.107863 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.107872 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.109232 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.210308 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.210339 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.210347 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.210361 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.210369 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.247544 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.247602 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.247717 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.247716 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.247779 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:45.247761253 +0000 UTC m=+36.284010661 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.247830 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:45.247800394 +0000 UTC m=+36.284049832 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.313258 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.313316 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.313329 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.313350 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.313363 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.348418 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.348671 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:09:45.348613316 +0000 UTC m=+36.384862794 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.348743 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.348856 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.348948 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.348981 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.348993 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.349040 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:45.349026028 +0000 UTC m=+36.385275546 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.348946 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.349115 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.349134 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.349214 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:45.349196483 +0000 UTC m=+36.385445931 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.416076 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.416164 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.416203 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.416245 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.416361 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.520682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.520742 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.520758 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.520781 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.520796 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.624778 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.624816 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.624827 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.624843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.624854 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.690809 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.690962 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.691356 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.691433 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.691470 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:37 crc kubenswrapper[4809]: E0930 00:09:37.691524 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.727818 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.727884 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.727898 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.727924 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.727938 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.832255 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.832350 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.832376 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.832414 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.832442 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.911117 4809 generic.go:334] "Generic (PLEG): container finished" podID="b5a18711-80f7-42d7-a6a9-04996c22c1f0" containerID="46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545" exitCode=0 Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.911183 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerDied","Data":"46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.935948 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.936016 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.936036 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.936062 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.936081 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:37Z","lastTransitionTime":"2025-09-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.937619 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.971246 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:37 crc kubenswrapper[4809]: I0930 00:09:37.990429 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:37Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.008244 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.024740 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239
c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.038570 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.043506 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.043568 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.043593 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.043624 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc 
kubenswrapper[4809]: I0930 00:09:38.043681 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.059083 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.071372 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.090618 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.118678 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z 
is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.144689 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.147336 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.147378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.147390 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.147410 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.147423 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.159886 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.172770 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.187702 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.197933 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17
7225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.249426 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.249456 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.249464 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.249478 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.249487 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.352459 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.352496 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.352504 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.352516 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.352526 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.455952 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.456001 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.456014 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.456032 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.456044 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.558950 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.559004 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.559020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.559040 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.559055 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.662037 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.662091 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.662103 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.662121 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.662134 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.765429 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.765499 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.765522 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.765549 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.765575 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.869670 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.869723 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.869733 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.869750 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.869762 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.922806 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.923389 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.923460 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.929104 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" event={"ID":"b5a18711-80f7-42d7-a6a9-04996c22c1f0","Type":"ContainerStarted","Data":"ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36"} Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.939504 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:
10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.960125 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.961783 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.965040 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.972055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.972130 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.972156 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.972186 4809 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:38 crc kubenswrapper[4809]: I0930 00:09:38.972207 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:38Z","lastTransitionTime":"2025-09-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.000273 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:38Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.020284 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.036815 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.050352 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.065683 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.074191 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.074240 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.074252 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.074270 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.074282 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.086048 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.102198 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.120149 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.135899 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.146250 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.159744 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.174828 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239
c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.176797 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.176837 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.176845 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.176862 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.176874 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.186731 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.203203 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-o
perator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.226726 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2
165c27d96a3baa2a9077de61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.241832 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.255007 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.267920 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.279584 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.279616 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.279653 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.279674 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.279690 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.280746 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.307538 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.330139 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.348902 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.367061 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.382881 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.382925 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.382937 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.382953 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.382967 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.389265 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.402342 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.415525 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.426920 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.445482 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.485690 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.485731 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.485742 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.485760 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.485771 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.588849 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.588933 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.588959 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.588993 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.589018 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.690470 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.690615 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.690766 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:39 crc kubenswrapper[4809]: E0930 00:09:39.691010 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:39 crc kubenswrapper[4809]: E0930 00:09:39.691215 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:39 crc kubenswrapper[4809]: E0930 00:09:39.691372 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.692450 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.692499 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.692519 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.692545 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.692565 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.713128 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.729537 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.754901 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.769185 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.802951 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.802983 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.802991 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.803006 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.803017 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.838040 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.874479 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.889934 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.903442 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.904811 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.904836 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.904847 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.904870 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.904890 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:39Z","lastTransitionTime":"2025-09-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.916099 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.925601 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.931804 4809 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.940927 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.954738 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.964131 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.978480 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:39 crc kubenswrapper[4809]: I0930 00:09:39.998681 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2
165c27d96a3baa2a9077de61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:39Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.007460 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.007505 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.007518 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.007540 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.007552 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.110848 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.111168 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.111301 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.111436 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.111551 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.214994 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.215070 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.215093 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.215123 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.215142 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.318259 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.318344 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.318368 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.318395 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.318414 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.421114 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.421172 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.421184 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.421202 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.421214 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.523694 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.523737 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.523747 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.523762 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.523772 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.625435 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.625470 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.625486 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.625501 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.625510 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.727899 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.727936 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.727944 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.727957 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.727965 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.830927 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.831000 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.831019 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.831045 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.831063 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.935701 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.935815 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.935839 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.935870 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.935893 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:40Z","lastTransitionTime":"2025-09-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:40 crc kubenswrapper[4809]: I0930 00:09:40.940818 4809 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.039203 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.039679 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.039699 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.039764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.039787 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.143725 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.143793 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.143806 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.143828 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.143840 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.247040 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.247076 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.247084 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.247100 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.247110 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.350204 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.350290 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.350314 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.350344 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.350366 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.454144 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.454209 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.454229 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.454253 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.454271 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.557039 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.557109 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.557130 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.557155 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.557175 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.660408 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.660452 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.660463 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.660479 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.660492 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.690225 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.690241 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 00:09:41 crc kubenswrapper[4809]: E0930 00:09:41.690392 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 00:09:41 crc kubenswrapper[4809]: E0930 00:09:41.690483 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.690257 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 00:09:41 crc kubenswrapper[4809]: E0930 00:09:41.690677 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.763174 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.763216 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.763230 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.763250 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.763262 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.865687 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.865739 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.865759 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.865783 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.865801 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.941169 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/0.log"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.944705 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61" exitCode=1
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.944766 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61"}
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.945963 4809 scope.go:117] "RemoveContainer" containerID="a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.980190 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.980242 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.980260 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.980284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.980302 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:41Z","lastTransitionTime":"2025-09-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:41 crc kubenswrapper[4809]: I0930 00:09:41.986200 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:41Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.002934 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.017291 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.031906 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.043510 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.057353 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.074740 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.083218 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.083311 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.083330 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.083355 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.083373 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.090592 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.105042 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.117043 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.129560 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.144404 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.156301 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.170266 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.185163 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.185196 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.185204 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.185218 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.185227 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.192594 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 
00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.361061 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.361097 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.361107 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.361121 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.361130 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.463129 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.463167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.463178 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.463194 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.463205 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.565342 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.565383 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.565391 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.565404 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.565412 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.667370 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.667408 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.667418 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.667433 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.667443 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.769705 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.769750 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.769768 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.769790 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.769807 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.873101 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.873157 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.873172 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.873193 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.873210 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.951269 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/1.log" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.952418 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/0.log" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.955275 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec" exitCode=1 Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.955351 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.955410 4809 scope.go:117] "RemoveContainer" containerID="a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.956728 4809 scope.go:117] "RemoveContainer" containerID="a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec" Sep 30 00:09:42 crc kubenswrapper[4809]: E0930 00:09:42.957046 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.974697 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.975902 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.975983 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.975998 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.976015 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.976028 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:42Z","lastTransitionTime":"2025-09-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:42 crc kubenswrapper[4809]: I0930 00:09:42.992559 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.005363 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.018403 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.038045 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.056325 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.072525 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.077976 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.078035 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.078088 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.078112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.078127 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.087440 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.106879 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.115581 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.129851 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.145856 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.155957 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.166962 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.180514 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.180561 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.180573 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.180588 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.180599 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.183268 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 
00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod 
openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:43Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.282696 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.282734 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.282745 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.282760 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.282771 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.385128 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.385158 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.385167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.385180 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.385193 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.490906 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.490965 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.490978 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.490997 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.491009 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.593487 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.593543 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.593563 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.593588 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.593607 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.690128 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.690136 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:43 crc kubenswrapper[4809]: E0930 00:09:43.690372 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.690535 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:43 crc kubenswrapper[4809]: E0930 00:09:43.690750 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:43 crc kubenswrapper[4809]: E0930 00:09:43.690910 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.698937 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.699012 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.699039 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.699067 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.699091 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.802180 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.802243 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.802265 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.802295 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.802316 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.904461 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.904534 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.904552 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.904575 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.904591 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:43Z","lastTransitionTime":"2025-09-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:43 crc kubenswrapper[4809]: I0930 00:09:43.960693 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/1.log" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.007147 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.007185 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.007196 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.007211 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.007223 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.109991 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.110044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.110061 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.110084 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.110103 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.212204 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.212231 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.212238 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.212253 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.212262 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.315194 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.315271 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.315294 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.315333 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.315351 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.418224 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.418292 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.418306 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.418323 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.418335 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.521312 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.521375 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.521397 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.521426 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.521449 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.624823 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.624881 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.624897 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.624920 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.624936 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.728297 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.728360 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.728381 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.728410 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.728428 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.830888 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.830939 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.830953 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.830974 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.830989 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.933968 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.934028 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.934040 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.934061 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:44 crc kubenswrapper[4809]: I0930 00:09:44.934077 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:44Z","lastTransitionTime":"2025-09-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.037244 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.037316 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.037338 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.037364 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.037382 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.059248 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49"] Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.060095 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.062609 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.062926 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.094146 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},
{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00
:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.108986 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.124066 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.140934 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.140990 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.141011 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.141031 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.141045 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.141582 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.151842 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.166878 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.180036 4809 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.187329 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz87s\" (UniqueName: \"kubernetes.io/projected/830a7e52-5a85-4555-8bcb-57828953b475-kube-api-access-nz87s\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.187399 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/830a7e52-5a85-4555-8bcb-57828953b475-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.187428 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/830a7e52-5a85-4555-8bcb-57828953b475-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.187458 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/830a7e52-5a85-4555-8bcb-57828953b475-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.197046 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.212338 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.224379 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.239025 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.243147 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.243193 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.243213 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.243233 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.243247 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.256104 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.268817 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.281870 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.288089 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz87s\" (UniqueName: \"kubernetes.io/projected/830a7e52-5a85-4555-8bcb-57828953b475-kube-api-access-nz87s\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.288360 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.288474 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.288565 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:01.288547142 +0000 UTC m=+52.324796550 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.288490 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/830a7e52-5a85-4555-8bcb-57828953b475-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.288745 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/830a7e52-5a85-4555-8bcb-57828953b475-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.288797 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/830a7e52-5a85-4555-8bcb-57828953b475-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.288868 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.288966 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.289006 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:01.288993825 +0000 UTC m=+52.325243423 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.290555 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/830a7e52-5a85-4555-8bcb-57828953b475-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.290976 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/830a7e52-5a85-4555-8bcb-57828953b475-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.295428 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/830a7e52-5a85-4555-8bcb-57828953b475-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.304426 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2
300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod 
openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStat
uses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.306743 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz87s\" (UniqueName: \"kubernetes.io/projected/830a7e52-5a85-4555-8bcb-57828953b475-kube-api-access-nz87s\") pod \"ovnkube-control-plane-749d76644c-qdg49\" (UID: \"830a7e52-5a85-4555-8bcb-57828953b475\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.316020 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.344947 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.344987 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.344996 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.345011 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.345020 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.382491 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.389586 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.389815 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.389881 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390053 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390081 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390097 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390158 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:01.390139057 +0000 UTC m=+52.426388475 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390234 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390280 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390304 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390404 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:01.390373533 +0000 UTC m=+52.426622991 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.390881 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:10:01.390859288 +0000 UTC m=+52.427108736 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:09:45 crc kubenswrapper[4809]: W0930 00:09:45.400261 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod830a7e52_5a85_4555_8bcb_57828953b475.slice/crio-a5ee1ddcbc78899407bd4e36b46f3fc984e533241d7bebff354754da4792d716 WatchSource:0}: Error finding container a5ee1ddcbc78899407bd4e36b46f3fc984e533241d7bebff354754da4792d716: Status 404 returned error can't find the container with id a5ee1ddcbc78899407bd4e36b46f3fc984e533241d7bebff354754da4792d716 Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.446989 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.447029 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.447038 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.447054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.447063 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.549458 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.549485 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.549494 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.549507 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.549517 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.651076 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.651119 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.651130 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.651147 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.651159 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.690483 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.690518 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.690622 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.690665 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.690770 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:45 crc kubenswrapper[4809]: E0930 00:09:45.690872 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.754471 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.754511 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.754520 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.754536 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.754545 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.856888 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.856921 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.856930 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.856942 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.856950 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.959844 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.959914 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.959933 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.959957 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.959971 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:45Z","lastTransitionTime":"2025-09-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.974506 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" event={"ID":"830a7e52-5a85-4555-8bcb-57828953b475","Type":"ContainerStarted","Data":"5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.974631 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" event={"ID":"830a7e52-5a85-4555-8bcb-57828953b475","Type":"ContainerStarted","Data":"1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.974660 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" event={"ID":"830a7e52-5a85-4555-8bcb-57828953b475","Type":"ContainerStarted","Data":"a5ee1ddcbc78899407bd4e36b46f3fc984e533241d7bebff354754da4792d716"} Sep 30 00:09:45 crc kubenswrapper[4809]: I0930 00:09:45.996843 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:45Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.014725 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.031828 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.047334 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.062976 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.063020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.063035 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.063055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.063072 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.069515 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.088521 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.112708 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.127422 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.135320 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.135364 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.135376 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.135394 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.135406 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.142715 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-4ktzq"] Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.142900 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.143150 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.143208 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.147913 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.151003 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.151054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.151063 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.151076 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.151084 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.162885 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.166877 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.166903 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.166911 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.166925 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.166934 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.168584 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2
300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod 
openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStat
uses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.185808 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.189200 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.189236 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.189247 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.189263 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.189272 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.192500 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.205962 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.208805 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.210060 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.210108 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.210120 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.210137 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.210148 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.222653 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.224757 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"7
3ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.224980 4809 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.227448 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.227497 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.227512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.227537 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.227553 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.236357 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.248105 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.259237 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.270436 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.282610 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.293283 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.299167 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwr5c\" (UniqueName: \"kubernetes.io/projected/43a82899-34fa-4d03-9856-7303839926c2-kube-api-access-kwr5c\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.299295 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.307623 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-con
fig/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.325187 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:0
9:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\
\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.329773 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.329815 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.329830 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.329848 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.329860 4809 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.336843 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.345919 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.357324 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.368241 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.380261 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.390172 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.400443 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwr5c\" (UniqueName: \"kubernetes.io/projected/43a82899-34fa-4d03-9856-7303839926c2-kube-api-access-kwr5c\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.400496 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.400611 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.400692 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:46.900674843 +0000 UTC m=+37.936924251 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.404711 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.413330 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.420211 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwr5c\" (UniqueName: \"kubernetes.io/projected/43a82899-34fa-4d03-9856-7303839926c2-kube-api-access-kwr5c\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.426379 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.431730 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.431789 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.431798 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.431813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.431824 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.443755 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.453751 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.471879 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 
0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:46Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.534144 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.534443 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.534576 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.534769 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.534899 4809 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.637867 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.637924 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.637946 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.637974 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.637996 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.740781 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.740882 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.740895 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.740912 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.740924 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.844520 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.844595 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.844605 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.844621 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.844631 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.906223 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.906533 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:46 crc kubenswrapper[4809]: E0930 00:09:46.906707 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:47.906638116 +0000 UTC m=+38.942887574 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.948175 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.948242 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.948261 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.948286 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:46 crc kubenswrapper[4809]: I0930 00:09:46.948306 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:46Z","lastTransitionTime":"2025-09-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.050602 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.050705 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.050739 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.050773 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.050797 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.153417 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.153460 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.153477 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.153493 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.153505 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.257108 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.257172 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.257194 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.257227 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.257255 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.360312 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.360382 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.360399 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.360422 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.360439 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.464197 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.464278 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.464303 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.464333 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.464357 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.567483 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.567545 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.567563 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.567588 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.567607 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.670934 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.671237 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.671320 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.671414 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.671515 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.690767 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.690794 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:47 crc kubenswrapper[4809]: E0930 00:09:47.690935 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.691039 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:47 crc kubenswrapper[4809]: E0930 00:09:47.691176 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.691070 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:47 crc kubenswrapper[4809]: E0930 00:09:47.691366 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:47 crc kubenswrapper[4809]: E0930 00:09:47.691439 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.774848 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.775191 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.775438 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.775735 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.775905 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.879057 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.879393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.879535 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.879722 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.879910 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.914883 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:47 crc kubenswrapper[4809]: E0930 00:09:47.915113 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:47 crc kubenswrapper[4809]: E0930 00:09:47.915281 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:49.915259486 +0000 UTC m=+40.951508924 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.983423 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.983489 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.983512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.983536 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:47 crc kubenswrapper[4809]: I0930 00:09:47.983552 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:47Z","lastTransitionTime":"2025-09-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.087141 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.087230 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.087258 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.087288 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.087306 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.191148 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.191235 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.191258 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.191290 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.191309 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.293378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.293424 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.293435 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.293450 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.293458 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.395677 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.395719 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.395731 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.395748 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.395760 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.499177 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.499248 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.499266 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.499291 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.499308 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.603085 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.603144 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.603162 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.603187 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.603204 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.705908 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.705971 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.705993 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.706020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.706044 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.809230 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.809312 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.809336 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.809364 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.809383 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.912767 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.912886 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.912912 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.912948 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:48 crc kubenswrapper[4809]: I0930 00:09:48.912970 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:48Z","lastTransitionTime":"2025-09-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.015703 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.015790 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.015822 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.015855 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.015877 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.119073 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.119129 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.119145 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.119170 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.119188 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.222900 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.222979 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.223006 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.223038 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.223059 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.326217 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.326289 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.326307 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.326337 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.326364 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.430252 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.430332 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.430357 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.430389 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.430412 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.533849 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.533923 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.533942 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.533976 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.533997 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.637812 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.637904 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.637919 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.637944 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.637988 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.690503 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.690618 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.690684 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.690518 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:49 crc kubenswrapper[4809]: E0930 00:09:49.690928 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:49 crc kubenswrapper[4809]: E0930 00:09:49.691021 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:49 crc kubenswrapper[4809]: E0930 00:09:49.691125 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:49 crc kubenswrapper[4809]: E0930 00:09:49.691317 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.712268 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.730354 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.742301 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.742393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.742423 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.742464 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.742491 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.758467 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.775102 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.799117 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.822201 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.843558 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.846706 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.846791 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.846813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.846842 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.846870 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.870501 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.887440 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.908093 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.926950 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.941015 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.942475 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:49 crc kubenswrapper[4809]: E0930 00:09:49.942723 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:49 crc kubenswrapper[4809]: E0930 00:09:49.942840 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:09:53.942810288 +0000 UTC m=+44.979059716 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.950478 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.950539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.950561 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.950588 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.950608 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:49Z","lastTransitionTime":"2025-09-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.960902 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"mul
tus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:49 crc kubenswrapper[4809]: I0930 00:09:49.984533 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:49Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.003611 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:50Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.026508 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:50Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.055539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.055660 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.055680 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.055709 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.055727 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.060954 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 
00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod 
openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:50Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.159722 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.159992 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.160012 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.160046 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.160069 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.263292 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.263361 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.263373 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.263427 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.263445 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.367743 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.367813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.367830 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.367855 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.367871 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.471338 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.471407 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.471425 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.471451 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.471472 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.574514 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.574587 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.574604 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.574630 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.574703 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.678843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.678894 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.678908 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.678926 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.678937 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.690761 4809 scope.go:117] "RemoveContainer" containerID="1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.781575 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.781695 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.781726 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.781759 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.781782 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.884317 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.884381 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.884405 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.884629 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.884701 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.987533 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.987604 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.987621 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.987673 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.987693 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:50Z","lastTransitionTime":"2025-09-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:50 crc kubenswrapper[4809]: I0930 00:09:50.997465 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.001257 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.002129 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.021104 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.038931 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.056586 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.072462 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.087465 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.090696 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.090755 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.090771 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.090796 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.090815 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.104301 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.116622 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.131230 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.153810 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a21d25f6284fc8fcaf593e0c9b5ab97a0a2ef2f2165c27d96a3baa2a9077de61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:41Z\\\",\\\"message\\\":\\\"ions/factory.go:140\\\\nI0930 00:09:41.148915 6076 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.148943 6076 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0930 00:09:41.149150 6076 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 00:09:41.149480 6076 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:41.150624 6076 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:41.150697 6076 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:41.150736 6076 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:09:41.150753 6076 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:41.150773 6076 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:41.150889 6076 factory.go:656] Stopping watch factory\\\\nI0930 00:09:41.150917 6076 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:09:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 
0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.168010 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.190551 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.193429 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.193518 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.193541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.193574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.193597 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.206526 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.222752 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.237018 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.257851 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.272342 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.288828 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:51Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.296975 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.297019 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.297030 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.297049 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.297061 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.399822 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.399868 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.399880 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.399896 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.399907 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.502289 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.502329 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.502340 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.502358 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.502371 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.605326 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.605386 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.605405 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.605432 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.605449 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.690972 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.691025 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.691045 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.690982 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:51 crc kubenswrapper[4809]: E0930 00:09:51.691159 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:51 crc kubenswrapper[4809]: E0930 00:09:51.691319 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:51 crc kubenswrapper[4809]: E0930 00:09:51.691428 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:51 crc kubenswrapper[4809]: E0930 00:09:51.691546 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.708704 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.708751 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.708764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.708782 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.708793 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.812183 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.812249 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.812268 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.812294 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.812311 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.920491 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.920561 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.920585 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.920614 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:51 crc kubenswrapper[4809]: I0930 00:09:51.920669 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:51Z","lastTransitionTime":"2025-09-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.022990 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.023035 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.023050 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.023069 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.023084 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.125176 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.125245 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.125267 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.125298 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.125320 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.228115 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.228188 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.228205 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.228223 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.228239 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.331138 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.331186 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.331197 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.331214 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.331225 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.433720 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.433825 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.433856 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.433889 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.433914 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.537161 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.537225 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.537238 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.537256 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.537268 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.639630 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.639698 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.639716 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.639735 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.639750 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.742732 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.742795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.742811 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.742830 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.742843 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.847159 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.847242 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.847268 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.847301 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.847325 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.951636 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.951749 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.951767 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.951795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:52 crc kubenswrapper[4809]: I0930 00:09:52.951816 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:52Z","lastTransitionTime":"2025-09-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.054555 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.054614 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.054631 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.054693 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.054710 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.158936 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.159009 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.159033 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.159067 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.159090 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.261450 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.261515 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.261539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.261568 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.261591 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.364477 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.364544 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.364555 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.364574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.364586 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.467461 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.467843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.468038 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.468195 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.468343 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.571560 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.571897 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.571986 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.572085 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.572176 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.583196 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.584883 4809 scope.go:117] "RemoveContainer" containerID="a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.600037 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.625332 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.650681 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.667740 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.678425 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.678482 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.678496 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.678516 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 
00:09:53.678532 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.686207 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.689925 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:53 crc kubenswrapper[4809]: E0930 00:09:53.690297 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.690138 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:53 crc kubenswrapper[4809]: E0930 00:09:53.690431 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.690142 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:53 crc kubenswrapper[4809]: E0930 00:09:53.690557 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.690105 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:53 crc kubenswrapper[4809]: E0930 00:09:53.690716 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.723073 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s 
restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.739238 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.760494 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.781604 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.781705 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.781793 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.781810 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.781837 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.781855 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.798349 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.814125 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.829022 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.841434 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.850928 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.862085 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.874489 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.884859 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.884917 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.884937 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.884963 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.884984 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.890157 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:53Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.994428 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:53 crc kubenswrapper[4809]: E0930 00:09:53.994556 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:53 crc kubenswrapper[4809]: E0930 00:09:53.994665 4809 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:01.994618085 +0000 UTC m=+53.030867583 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.995356 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.995392 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.995402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.995417 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:53 crc kubenswrapper[4809]: I0930 00:09:53.995428 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:53Z","lastTransitionTime":"2025-09-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.012911 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/1.log" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.015621 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.016822 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.039465 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.056489 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.072087 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.087192 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.097632 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.097689 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.097699 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.097716 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.097727 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.102583 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.133708 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.144108 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.156697 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.168544 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.186682 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.200743 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.200787 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.200799 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.200813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.200822 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.211308 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.223035 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.238299 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.247698 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.257753 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.267861 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.283102 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod 
openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:54Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.302457 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.302489 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.302498 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.302513 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.302524 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.404391 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.404420 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.404428 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.404441 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.404450 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.507867 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.507953 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.507977 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.508006 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.508028 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.610895 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.610951 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.610998 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.611024 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.611042 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.713672 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.713706 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.713714 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.713728 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.713743 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.816386 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.816462 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.816485 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.816514 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.816533 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.920001 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.920082 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.920106 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.920136 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:54 crc kubenswrapper[4809]: I0930 00:09:54.920162 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:54Z","lastTransitionTime":"2025-09-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.022449 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/2.log" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.023447 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.023495 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.023513 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.023536 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.023555 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.023801 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/1.log" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.028877 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea" exitCode=1 Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.028945 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.029012 4809 scope.go:117] "RemoveContainer" containerID="a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.030047 4809 scope.go:117] "RemoveContainer" containerID="f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea" Sep 30 00:09:55 crc kubenswrapper[4809]: E0930 00:09:55.030303 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.055764 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.077787 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.096553 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.118206 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.126400 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.126480 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.126505 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.126540 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.126561 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.136454 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.159779 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.177865 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.199104 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.219363 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.229453 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.229512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.229549 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.229589 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.229612 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.251550 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8e69c7cb46c031b2b619f4c1175317e82578dc2300df44c56bbf69b621414ec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:42Z\\\",\\\"message\\\":\\\" 6200 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:42Z is after 2025-08-24T17:21:41Z]\\\\nI0930 00:09:42.867631 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-network-node-identity/network-node-identity-vrzqb in Admin Network Policy controller\\\\nI0930 00:09:42.867676 6200 admin_network_policy_pod.go:59] Finished syncing Pod openshift-network-node-identity/network-node-identity-vrzqb Admin Network Policy controller: took 46.511µs\\\\nI0930 00:09:42.867694 6200 admin_network_policy_pod.go:56] Processing sync for Pod openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj in Admin Network Policy controller\\\\nI0930 00:09:42.867707 6200 admin_network_policy_pod.go:59] Finished syncing Pod 
openshift-opera\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.268330 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.285156 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.306314 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.321249 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.336749 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.336805 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.336825 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.336858 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.336877 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.342942 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.363625 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.374443 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:55Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.440294 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.440368 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.440392 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.440420 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.440436 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.543735 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.543815 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.543839 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.543868 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.543885 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.647163 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.647214 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.647227 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.647245 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.647256 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.690057 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:55 crc kubenswrapper[4809]: E0930 00:09:55.690473 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.690112 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:55 crc kubenswrapper[4809]: E0930 00:09:55.690819 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.690058 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.690142 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:55 crc kubenswrapper[4809]: E0930 00:09:55.690922 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:55 crc kubenswrapper[4809]: E0930 00:09:55.691063 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.750222 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.750336 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.750355 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.750376 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.750393 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.854274 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.854340 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.854359 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.854383 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.854402 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.957596 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.957692 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.957715 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.957739 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:55 crc kubenswrapper[4809]: I0930 00:09:55.957755 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:55Z","lastTransitionTime":"2025-09-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.035355 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/2.log" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.040489 4809 scope.go:117] "RemoveContainer" containerID="f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea" Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.040823 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.060192 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.060727 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.060778 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 
00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.060802 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.060830 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.060852 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.087085 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.112625 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.128425 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.151924 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.163702 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.163775 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.163792 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.163819 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.163836 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.177176 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.191368 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.207409 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.233230 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.245922 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.265622 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.265677 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.265687 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.265704 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.265720 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.278434 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.296825 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.315222 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.333633 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.353422 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.365972 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.367709 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.367743 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.367754 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.367770 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.367781 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.376414 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.471250 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.471329 4809 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.471355 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.471390 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.471415 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.574512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.574574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.574595 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.574617 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.574631 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.591666 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.591734 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.591752 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.591779 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.591796 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.615031 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.620971 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.621040 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.621065 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.621094 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.621118 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.646535 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.653880 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.654142 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.654299 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.654456 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.654602 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.707068 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.711255 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.711380 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.711463 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.711545 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.711662 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.726079 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.729410 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.729431 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.729439 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.729452 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.729461 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.742592 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:56Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:56 crc kubenswrapper[4809]: E0930 00:09:56.743014 4809 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.745607 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.745763 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.745869 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.745992 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.746072 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.849419 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.849464 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.849476 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.849527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.849542 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.953255 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.953318 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.953338 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.953368 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:56 crc kubenswrapper[4809]: I0930 00:09:56.953389 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:56Z","lastTransitionTime":"2025-09-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.056723 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.056776 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.056797 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.056825 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.056846 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.159395 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.159467 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.159479 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.159497 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.159508 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.262411 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.262477 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.262500 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.262529 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.262551 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.364892 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.364951 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.364967 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.364985 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.364998 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.468275 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.468328 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.468345 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.468369 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.468387 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.571921 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.571988 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.571999 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.572016 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.572028 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.674941 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.675016 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.675033 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.675064 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.675082 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.690380 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.690436 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.690475 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:57 crc kubenswrapper[4809]: E0930 00:09:57.690557 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.690576 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:57 crc kubenswrapper[4809]: E0930 00:09:57.690770 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:57 crc kubenswrapper[4809]: E0930 00:09:57.690917 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:57 crc kubenswrapper[4809]: E0930 00:09:57.691159 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.778471 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.778583 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.778602 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.778628 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.778676 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.881514 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.881573 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.881590 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.881613 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.881629 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.984004 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.984087 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.984107 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.984161 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:57 crc kubenswrapper[4809]: I0930 00:09:57.984180 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:57Z","lastTransitionTime":"2025-09-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.090440 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.090991 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.091020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.091046 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.091069 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.193730 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.193775 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.193786 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.193802 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.193815 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.296944 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.297002 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.297026 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.297054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.297077 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.400112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.400190 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.400211 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.400241 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.400262 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.502350 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.502406 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.502427 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.502454 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.502475 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.606125 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.606195 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.606215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.606242 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.606265 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.709305 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.709402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.709425 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.709448 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.709465 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.812822 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.812861 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.812872 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.812888 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.812899 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.915732 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.915764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.915773 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.915786 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:58 crc kubenswrapper[4809]: I0930 00:09:58.915795 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:58Z","lastTransitionTime":"2025-09-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.018393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.018450 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.018469 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.018493 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.018511 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.122111 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.122198 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.122216 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.122238 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.122286 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.225881 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.226005 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.226075 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.226110 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.226328 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.329994 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.330095 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.330119 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.330187 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.330215 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.433476 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.433556 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.433572 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.433587 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.433597 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.537268 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.537307 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.537318 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.537333 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.537342 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.640439 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.640699 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.640735 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.640767 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.640787 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.690859 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.690981 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:09:59 crc kubenswrapper[4809]: E0930 00:09:59.691229 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.691278 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.691274 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:09:59 crc kubenswrapper[4809]: E0930 00:09:59.691450 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:09:59 crc kubenswrapper[4809]: E0930 00:09:59.691704 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:09:59 crc kubenswrapper[4809]: E0930 00:09:59.693930 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.710980 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.730395 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.744300 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.744393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.744415 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.744441 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.744461 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.746376 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.767607 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.790253 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.815400 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.835101 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.846433 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.846474 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.846486 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.846504 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.846515 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.852829 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.868311 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.880763 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.895106 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.919520 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d84
5bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.937030 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.949166 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.949222 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.949239 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.949263 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.949282 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:09:59Z","lastTransitionTime":"2025-09-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.949407 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.971153 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:09:59 crc kubenswrapper[4809]: I0930 00:09:59.988515 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:09:59Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.005136 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:00Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.052886 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.052971 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.052995 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.053026 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.053048 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.156453 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.156517 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.156541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.156573 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.156596 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.259847 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.259992 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.260018 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.260043 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.260060 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.363083 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.363207 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.363230 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.363262 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.363284 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.466194 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.466265 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.466282 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.466305 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.466323 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.568876 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.568936 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.568952 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.568977 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.568994 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.671590 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.671737 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.671762 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.671792 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.671814 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.775709 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.775775 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.775797 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.775855 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.775869 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.879043 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.879095 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.879111 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.879133 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.879149 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.983170 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.983289 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.983313 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.983343 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:00 crc kubenswrapper[4809]: I0930 00:10:00.983367 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:00Z","lastTransitionTime":"2025-09-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.086429 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.086495 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.086511 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.086537 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.086554 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.189940 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.190004 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.190020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.190044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.190063 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.293396 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.293441 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.293461 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.293490 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.293512 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.371401 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.371532 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.371577 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.371747 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:33.371709706 +0000 UTC m=+84.407959174 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.371783 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.371885 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:33.37185041 +0000 UTC m=+84.408099888 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.396574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.396627 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.396682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.396714 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.396737 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.471984 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.472215 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:10:33.472183818 +0000 UTC m=+84.508433256 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.472460 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.472511 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.472531 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.472630 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:33.47260215 +0000 UTC m=+84.508851598 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.472160 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.474795 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.475064 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.475100 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.475124 4809 projected.go:194] Error preparing data for projected volume 
kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.476631 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:33.476559737 +0000 UTC m=+84.512809185 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.500163 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.500220 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.500242 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.500271 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.500293 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.604084 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.604186 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.604210 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.604247 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.604270 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.689913 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.690073 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.690156 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.689927 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.689931 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.690250 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.690320 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:01 crc kubenswrapper[4809]: E0930 00:10:01.690458 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.709003 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.709055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.709071 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.709106 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.709120 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.812795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.812860 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.812882 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.812911 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.812934 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.916140 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.916198 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.916214 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.916238 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:01 crc kubenswrapper[4809]: I0930 00:10:01.916256 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:01Z","lastTransitionTime":"2025-09-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.019146 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.019263 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.019284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.019309 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.019327 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.082421 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:02 crc kubenswrapper[4809]: E0930 00:10:02.082623 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:10:02 crc kubenswrapper[4809]: E0930 00:10:02.082775 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:18.082742874 +0000 UTC m=+69.118992312 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.122913 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.123013 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.123045 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.123078 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.123100 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.138789 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.161477 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.183224 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.205294 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.225898 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.226954 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.227024 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.227044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.227075 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.227098 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.247228 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.284932 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.302697 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.320804 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.329914 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.329978 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.329994 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.330065 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.330083 4809 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.339723 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.357943 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.376634 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.394016 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.415157 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.430971 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.435870 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.435946 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.435971 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.436003 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.436029 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.454314 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.474782 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/
crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.493769 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.538325 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.538385 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.538404 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.538438 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.538455 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.644756 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.644857 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.644907 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.644935 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.644989 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.723635 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.740634 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.741789 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 
00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.749286 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.749376 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.749390 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.749430 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.749447 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.776389 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.794102 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.815951 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.835141 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.850677 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17
7225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.852209 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.852266 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.852286 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.852313 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.852333 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.864796 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.881877 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.895825 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.909006 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.923006 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.934865 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.952533 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.954904 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.954949 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.954967 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.954989 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.955005 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:02Z","lastTransitionTime":"2025-09-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.971847 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.982204 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:02 crc kubenswrapper[4809]: I0930 00:10:02.996193 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0
a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:02Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.016833 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:03Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.058162 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.058209 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.058218 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.058235 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.058245 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.160447 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.160506 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.160524 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.160549 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.160566 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.263399 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.263494 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.263514 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.263542 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.263563 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.367163 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.367205 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.367215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.367232 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.367241 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.471251 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.471323 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.471345 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.471376 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.471396 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.575434 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.575514 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.575532 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.575563 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.575583 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.679001 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.679065 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.679082 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.679107 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.679123 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.690700 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.690813 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.690719 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:03 crc kubenswrapper[4809]: E0930 00:10:03.690869 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.690894 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:03 crc kubenswrapper[4809]: E0930 00:10:03.691056 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:03 crc kubenswrapper[4809]: E0930 00:10:03.691132 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:03 crc kubenswrapper[4809]: E0930 00:10:03.691368 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.781353 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.781443 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.781461 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.781488 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.781512 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.885132 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.885199 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.885215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.885237 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.885258 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.989075 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.989147 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.989167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.989191 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:03 crc kubenswrapper[4809]: I0930 00:10:03.989208 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:03Z","lastTransitionTime":"2025-09-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.093151 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.093243 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.093273 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.093308 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.093333 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.196449 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.196498 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.196508 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.196528 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.196540 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.300218 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.300303 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.300328 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.300357 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.300374 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.403547 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.403621 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.403687 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.403721 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.403745 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.506490 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.506518 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.506526 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.506538 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.506546 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.609101 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.609157 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.609174 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.609200 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.609220 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.712708 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.712783 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.712799 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.712825 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.712841 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.816076 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.816159 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.816214 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.816243 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.816265 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.919284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.919337 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.919354 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.919379 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:04 crc kubenswrapper[4809]: I0930 00:10:04.919396 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:04Z","lastTransitionTime":"2025-09-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.021608 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.021690 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.021705 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.021725 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.021740 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.124055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.124104 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.124115 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.124132 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.124147 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.227340 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.227401 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.227419 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.227442 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.227459 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.329879 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.329956 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.329981 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.330010 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.330032 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.433138 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.433210 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.433244 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.433272 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.433292 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.536966 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.537033 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.537055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.537084 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.537110 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.641560 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.641615 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.641632 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.641688 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.641708 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.690308 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.690588 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.690618 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:05 crc kubenswrapper[4809]: E0930 00:10:05.690742 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.690821 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:05 crc kubenswrapper[4809]: E0930 00:10:05.690969 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:05 crc kubenswrapper[4809]: E0930 00:10:05.691158 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:05 crc kubenswrapper[4809]: E0930 00:10:05.692721 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.745393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.745458 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.745482 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.745505 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.745519 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.848743 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.848866 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.848886 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.848913 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.848932 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.952085 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.952146 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.952155 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.952203 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:05 crc kubenswrapper[4809]: I0930 00:10:05.952219 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:05Z","lastTransitionTime":"2025-09-30T00:10:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.055555 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.055620 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.055629 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.055669 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.055680 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.159038 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.159088 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.159099 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.159116 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.159129 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.262965 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.263054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.263072 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.263104 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.263124 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.366576 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.366633 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.366866 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.366886 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.366896 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.470140 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.470202 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.470215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.470239 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.470255 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.575112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.575244 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.575266 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.575297 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.575354 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.678968 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.679140 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.679164 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.679192 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.679210 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.781840 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.781908 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.781928 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.781958 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.781976 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.885400 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.885475 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.885496 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.885531 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.885552 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.920370 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.920421 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.920431 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.920453 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.920465 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: E0930 00:10:06.938396 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:06Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.943696 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.943774 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.943797 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.943830 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.943858 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: E0930 00:10:06.963692 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:06Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.968215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.968269 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.968281 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.968306 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.968322 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:06 crc kubenswrapper[4809]: E0930 00:10:06.986687 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:06Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.993045 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.993102 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.993112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.993132 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:06 crc kubenswrapper[4809]: I0930 00:10:06.993151 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:06Z","lastTransitionTime":"2025-09-30T00:10:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.009407 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:07Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.015991 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.016048 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.016059 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.016081 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.016094 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.035270 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:07Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.035398 4809 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.037931 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.037962 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.037969 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.037989 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.038000 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.141383 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.141499 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.141527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.141565 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.141593 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.244848 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.244967 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.245000 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.245033 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.245053 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.349398 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.349617 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.349633 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.349685 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.349704 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.453663 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.453721 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.453731 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.453747 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.453759 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.557440 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.557508 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.557522 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.557545 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.557561 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.660157 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.660190 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.660199 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.660211 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.660220 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.690066 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.690126 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.690080 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.690228 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.690268 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.690535 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.690753 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:07 crc kubenswrapper[4809]: E0930 00:10:07.690917 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.769578 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.770039 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.770196 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.770357 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.770557 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.873470 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.873528 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.873546 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.873571 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.873590 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.977309 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.977377 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.977403 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.977432 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:07 crc kubenswrapper[4809]: I0930 00:10:07.977456 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:07Z","lastTransitionTime":"2025-09-30T00:10:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.080912 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.080979 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.081001 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.081031 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.081057 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.184950 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.185019 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.185040 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.185070 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.185095 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.287550 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.287607 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.287623 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.287683 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.287722 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.390897 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.390967 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.390984 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.391054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.391071 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.494071 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.494123 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.494141 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.494172 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.494196 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.597451 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.597536 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.597560 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.597592 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.597616 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.691693 4809 scope.go:117] "RemoveContainer" containerID="f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea" Sep 30 00:10:08 crc kubenswrapper[4809]: E0930 00:10:08.691935 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.701199 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.701286 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.701299 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.701343 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.701357 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.805065 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.805122 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.805132 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.805157 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.805169 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.908742 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.908817 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.908834 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.908857 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:08 crc kubenswrapper[4809]: I0930 00:10:08.908874 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:08Z","lastTransitionTime":"2025-09-30T00:10:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.012768 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.012837 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.012860 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.012890 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.012911 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.115615 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.115720 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.115739 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.115766 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.115785 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.218868 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.218913 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.218928 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.218947 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.218962 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.322140 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.322215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.322232 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.322258 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.322277 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.425075 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.425132 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.425156 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.425184 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.425208 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.528757 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.528821 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.528844 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.528875 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.528897 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.632437 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.632510 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.632527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.632549 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.632566 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.690045 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.690127 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.690050 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:09 crc kubenswrapper[4809]: E0930 00:10:09.690200 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:09 crc kubenswrapper[4809]: E0930 00:10:09.690568 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:09 crc kubenswrapper[4809]: E0930 00:10:09.690679 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.690753 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:09 crc kubenswrapper[4809]: E0930 00:10:09.690818 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.714063 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.734335 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.735611 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.735660 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.735673 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.735691 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.735702 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.747430 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.759667 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.781155 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.799460 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.811843 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.825210 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.838939 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.839038 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.839052 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.839071 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.839086 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.839758 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.850871 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.866274 4809 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f
3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.879304 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.890408 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.904024 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.916964 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.936505 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.942184 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.942283 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.942307 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.942372 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.942396 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:09Z","lastTransitionTime":"2025-09-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.952554 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:
09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:09 crc kubenswrapper[4809]: I0930 00:10:09.966049 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:09Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.044723 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.044774 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.044787 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.044805 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.044819 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.149245 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.149290 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.149298 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.149312 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.149322 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.252444 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.252524 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.252542 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.252567 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.252585 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.355726 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.355790 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.355808 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.355830 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.355848 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.457979 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.458044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.458056 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.458074 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.458086 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.561556 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.561611 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.561624 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.561647 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.561689 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.664702 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.664747 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.664756 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.664770 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.664780 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.766952 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.767497 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.767512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.767527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.767539 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.870117 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.870183 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.870196 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.870212 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.870223 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.973025 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.973098 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.973116 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.973142 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:10 crc kubenswrapper[4809]: I0930 00:10:10.973160 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:10Z","lastTransitionTime":"2025-09-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.076483 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.076563 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.076588 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.076617 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.076674 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.180454 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.180522 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.180539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.180567 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.180590 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.284271 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.284333 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.284351 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.284380 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.284397 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.387831 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.387926 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.387945 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.387977 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.388000 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.491388 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.491457 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.491477 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.491507 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.491528 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.595165 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.595293 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.595312 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.595347 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.595370 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.690009 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.690041 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.690052 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.690068 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:11 crc kubenswrapper[4809]: E0930 00:10:11.690190 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:11 crc kubenswrapper[4809]: E0930 00:10:11.690381 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:11 crc kubenswrapper[4809]: E0930 00:10:11.690497 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:11 crc kubenswrapper[4809]: E0930 00:10:11.690614 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.699202 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.699266 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.699284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.699307 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.699325 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.802055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.802117 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.802137 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.802167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.802186 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.905374 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.905436 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.905459 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.905491 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:11 crc kubenswrapper[4809]: I0930 00:10:11.905513 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:11Z","lastTransitionTime":"2025-09-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.008259 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.008318 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.008341 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.008368 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.008390 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.113103 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.113175 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.113200 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.113227 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.113244 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.216982 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.217026 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.217038 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.217055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.217067 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.320210 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.320256 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.320270 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.320296 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.320313 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.423831 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.423893 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.423908 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.423931 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.423946 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.527717 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.527774 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.527789 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.527811 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.527826 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.631774 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.631834 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.631850 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.631874 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.631891 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.734987 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.735031 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.735044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.735066 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.735109 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.838557 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.838605 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.838615 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.838633 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.838658 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.943019 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.943074 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.943089 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.943114 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:12 crc kubenswrapper[4809]: I0930 00:10:12.943128 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:12Z","lastTransitionTime":"2025-09-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.047053 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.047091 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.047100 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.047122 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.047135 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.150124 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.150168 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.150180 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.150197 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.150208 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.252284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.252340 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.252354 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.252374 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.252386 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.355761 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.355813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.355825 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.355846 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.355856 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.458806 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.458851 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.458863 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.458879 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.458891 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.562033 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.562102 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.562114 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.562134 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.562146 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.665546 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.665595 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.665607 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.665630 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.665660 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.690558 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.690614 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.690614 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.690740 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:13 crc kubenswrapper[4809]: E0930 00:10:13.690957 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:13 crc kubenswrapper[4809]: E0930 00:10:13.691086 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:13 crc kubenswrapper[4809]: E0930 00:10:13.691280 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:13 crc kubenswrapper[4809]: E0930 00:10:13.691356 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.770283 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.770344 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.770359 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.770379 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.770392 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.874265 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.874319 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.874331 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.874350 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.874364 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.977492 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.977557 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.977575 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.977598 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:13 crc kubenswrapper[4809]: I0930 00:10:13.977615 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:13Z","lastTransitionTime":"2025-09-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.081814 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.081881 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.081898 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.081929 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.081952 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.185853 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.186070 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.186091 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.186118 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.186140 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.289253 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.289311 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.289325 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.289347 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.289598 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.392050 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.392124 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.392136 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.392152 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.392163 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.495764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.495840 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.495853 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.495878 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.495891 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.599048 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.599099 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.599112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.599131 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.599144 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.703195 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.703250 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.703266 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.703290 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.703309 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.806843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.806909 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.806923 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.806943 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.806956 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.909887 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.910474 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.910585 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.910746 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:14 crc kubenswrapper[4809]: I0930 00:10:14.910873 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:14Z","lastTransitionTime":"2025-09-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.013227 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.013275 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.013286 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.013303 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.013317 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.116136 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.116190 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.116204 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.116224 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.116236 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.218622 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.218752 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.218775 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.218806 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.218828 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.322378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.322450 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.322467 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.322495 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.322516 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.426026 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.426070 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.426082 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.426098 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.426110 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.528796 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.528885 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.528909 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.528940 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.528961 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.632546 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.632619 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.632675 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.632710 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.632730 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.690799 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.690912 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.690978 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:15 crc kubenswrapper[4809]: E0930 00:10:15.690980 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.690826 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:15 crc kubenswrapper[4809]: E0930 00:10:15.691086 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:15 crc kubenswrapper[4809]: E0930 00:10:15.691236 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:15 crc kubenswrapper[4809]: E0930 00:10:15.691380 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.735189 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.735229 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.735240 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.735257 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.735268 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.838317 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.838369 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.838387 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.838408 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.838424 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.941236 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.941335 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.941360 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.941392 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:15 crc kubenswrapper[4809]: I0930 00:10:15.941425 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:15Z","lastTransitionTime":"2025-09-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.044415 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.044471 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.044484 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.044503 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.044516 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.146729 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.146781 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.146794 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.146815 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.146834 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.249313 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.249388 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.249412 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.249442 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.249466 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.352169 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.352219 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.352228 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.352245 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.352256 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.454928 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.454969 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.454980 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.454997 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.455008 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.557396 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.557432 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.557440 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.557456 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.557465 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.660118 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.660169 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.660185 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.660207 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.660226 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.763307 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.763344 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.763356 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.763371 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.763381 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.866224 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.866319 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.866336 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.866360 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.866378 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.975087 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.975153 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.975176 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.975322 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:16 crc kubenswrapper[4809]: I0930 00:10:16.975353 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:16Z","lastTransitionTime":"2025-09-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.078571 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.078627 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.078636 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.078666 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.078675 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.182130 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.182181 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.182191 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.182209 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.182224 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.271813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.271881 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.271894 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.271914 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.271927 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.288477 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:17Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.292712 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.292745 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.292758 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.292774 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.292787 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.317349 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:17Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.322864 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.322918 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.322938 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.322962 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.322980 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.340932 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:17Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.344771 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.344815 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.344826 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.344844 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.344855 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.358549 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:17Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.363496 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.363541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.363553 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.363574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.363588 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.376872 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:17Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.377025 4809 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.378916 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.378944 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.378956 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.378971 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.378983 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.482171 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.482231 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.482249 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.482270 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.482282 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.595053 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.595093 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.595104 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.595121 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.595132 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.689903 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.689960 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.690026 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.690080 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.689903 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.690415 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.690398 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:17 crc kubenswrapper[4809]: E0930 00:10:17.690467 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.698085 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.698149 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.698166 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.698187 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.698202 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.803536 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.803592 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.803604 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.803623 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.803640 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.906580 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.906628 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.906638 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.906675 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:17 crc kubenswrapper[4809]: I0930 00:10:17.906685 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:17Z","lastTransitionTime":"2025-09-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.009396 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.009444 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.009457 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.009476 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.009486 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.112438 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.112487 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.112501 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.112518 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.112531 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.175082 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:18 crc kubenswrapper[4809]: E0930 00:10:18.175272 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:10:18 crc kubenswrapper[4809]: E0930 00:10:18.175358 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:10:50.175338794 +0000 UTC m=+101.211588292 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.215209 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.215262 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.215275 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.215292 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.215304 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.318257 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.318311 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.318329 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.318351 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.318368 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.425073 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.425167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.425244 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.425312 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.425369 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.529466 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.529531 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.529551 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.529576 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.529594 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.632180 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.632234 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.632246 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.632261 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.632272 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.735082 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.735166 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.735189 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.735221 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.735245 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.838098 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.838154 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.838168 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.838185 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.838198 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.940903 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.940969 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.940992 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.941023 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:18 crc kubenswrapper[4809]: I0930 00:10:18.941047 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:18Z","lastTransitionTime":"2025-09-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.043545 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.043623 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.043682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.043708 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.043725 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.140929 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/0.log" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.141268 4809 generic.go:334] "Generic (PLEG): container finished" podID="efc7b2e1-7308-483a-9117-02e83c45a528" containerID="a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3" exitCode=1 Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.141356 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerDied","Data":"a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.141992 4809 scope.go:117] "RemoveContainer" containerID="a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.146005 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.146042 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.146056 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.146075 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.146087 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.157933 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.177714 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.195826 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.211823 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17
7225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.229856 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.246338 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.248551 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.248584 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.248595 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.248613 
4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.248627 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.271054 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb5
2bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.285369 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.306359 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.323416 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.336436 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.350577 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.351366 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.351406 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.351418 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.351438 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.351450 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.361260 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.374277 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.385108 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.400363 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.417029 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.438210 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.453901 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.453961 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.453977 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.453999 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.454015 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.555461 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.555502 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.555513 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.555530 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.555540 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.658867 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.658907 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.658916 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.658932 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.658941 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.690832 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.690882 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.690891 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:19 crc kubenswrapper[4809]: E0930 00:10:19.690987 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.691026 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:19 crc kubenswrapper[4809]: E0930 00:10:19.691140 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:19 crc kubenswrapper[4809]: E0930 00:10:19.691320 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:19 crc kubenswrapper[4809]: E0930 00:10:19.691510 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.709600 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.730229 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.747045 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.761430 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.761496 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.761521 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.761554 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.761576 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.762570 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.777764 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.792439 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.805935 4809 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.816491 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.828414 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.838033 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.849486 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.864134 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.864172 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.864113 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.864185 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.864292 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.864303 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.874586 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.888258 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.902557 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.912256 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.925014 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.944569 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:19Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.967322 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.967363 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.967378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.967403 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:19 crc kubenswrapper[4809]: I0930 00:10:19.967419 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:19Z","lastTransitionTime":"2025-09-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.068995 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.069040 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.069054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.069071 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.069083 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.150308 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/0.log" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.150385 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerStarted","Data":"2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.167557 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.172224 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.172253 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.172264 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.172280 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.172288 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.179931 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.194928 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.209019 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.219352 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.232701 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.256327 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2f
b3b31aaf5db90fc06f0ad5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.271819 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.276336 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.276369 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.276377 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.276390 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.276400 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.284716 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.298072 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:
45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.312625 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sche
duler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.331082 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.345125 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.360137 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.371966 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.378770 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.378811 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.378820 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.378835 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.378845 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.386108 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.398383 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.410914 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:20Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.481334 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.481394 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.481406 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.481426 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.481438 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.584345 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.584401 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.584415 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.584434 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.584446 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.686762 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.686809 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.686859 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.686882 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.686931 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.789523 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.789605 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.789628 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.789710 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.789735 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.892033 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.892093 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.892125 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.892167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.892189 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.994763 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.994832 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.994853 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.994877 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:20 crc kubenswrapper[4809]: I0930 00:10:20.994909 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:20Z","lastTransitionTime":"2025-09-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.097044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.097078 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.097088 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.097101 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.097109 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.199813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.199857 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.199867 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.199882 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.199892 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.302345 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.302393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.302409 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.302431 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.302446 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.405414 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.405468 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.405480 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.405501 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.405515 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.508342 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.508382 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.508395 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.508411 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.508425 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.611553 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.611621 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.611662 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.611695 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.611716 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.690488 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.690488 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.690707 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.690672 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:21 crc kubenswrapper[4809]: E0930 00:10:21.690753 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:21 crc kubenswrapper[4809]: E0930 00:10:21.690861 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:21 crc kubenswrapper[4809]: E0930 00:10:21.690968 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:21 crc kubenswrapper[4809]: E0930 00:10:21.691109 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.714614 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.714680 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.714690 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.714704 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.714715 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.817032 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.817291 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.817308 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.817332 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.817348 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.919237 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.919308 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.919399 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.919435 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:21 crc kubenswrapper[4809]: I0930 00:10:21.919459 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:21Z","lastTransitionTime":"2025-09-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.022151 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.022238 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.022250 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.022269 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.022287 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.125416 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.125486 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.125504 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.125527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.125542 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.229067 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.229147 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.229165 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.229193 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.229209 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.331475 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.331552 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.331582 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.331682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.331711 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.434293 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.434363 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.434378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.434402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.434418 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.536964 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.537014 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.537027 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.537055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.537070 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.639937 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.640001 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.640014 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.640055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.640071 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.742800 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.742853 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.742866 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.742889 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.742904 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.846767 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.846828 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.846843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.846869 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.846890 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.950129 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.950187 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.950198 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.950220 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:22 crc kubenswrapper[4809]: I0930 00:10:22.950235 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:22Z","lastTransitionTime":"2025-09-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.052938 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.052979 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.052990 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.053009 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.053020 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.155288 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.155346 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.155367 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.155391 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.155407 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.258026 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.258100 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.258123 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.258153 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.258176 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.360696 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.360768 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.360786 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.360813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.360831 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.463231 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.463313 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.463338 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.463372 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.463396 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.565600 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.565654 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.565663 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.565680 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.565688 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.668101 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.668149 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.668160 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.668179 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.668203 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.690874 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.690920 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.690922 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.690996 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:23 crc kubenswrapper[4809]: E0930 00:10:23.691115 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:23 crc kubenswrapper[4809]: E0930 00:10:23.691261 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:23 crc kubenswrapper[4809]: E0930 00:10:23.691624 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:23 crc kubenswrapper[4809]: E0930 00:10:23.691701 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.691997 4809 scope.go:117] "RemoveContainer" containerID="f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.771455 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.771522 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.771541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.771570 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.771590 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.875452 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.875495 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.875504 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.875522 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.875532 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.978073 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.978117 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.978128 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.978146 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:23 crc kubenswrapper[4809]: I0930 00:10:23.978159 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:23Z","lastTransitionTime":"2025-09-30T00:10:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.094821 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.094888 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.094903 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.094934 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.094948 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.165780 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/2.log" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.169790 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.170797 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.197043 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaa
a1a049c87e0a12c8d5679a6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.197328 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.197363 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.197371 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.197385 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.197394 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.219247 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.243347 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.257836 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.272744 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.289733 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.299985 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.300022 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.300032 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.300047 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.300058 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.302327 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.314791 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:
45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.326581 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sche
duler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.338216 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.354000 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.369558 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.383337 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and 
discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.396698 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.402594 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.402626 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.402663 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.402682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.402693 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.409766 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.425372 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.437310 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.451778 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:24Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.505699 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.505782 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.505804 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.505864 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.505904 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.611177 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.611225 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.611234 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.611250 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.611262 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.713386 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.713443 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.713460 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.713483 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.713507 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.816779 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.816847 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.816865 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.816887 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.816908 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.920539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.920604 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.920623 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.920682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:24 crc kubenswrapper[4809]: I0930 00:10:24.920701 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:24Z","lastTransitionTime":"2025-09-30T00:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.024319 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.024383 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.024402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.024426 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.024443 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.127636 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.127772 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.127794 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.127820 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.127839 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.177447 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/3.log" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.178757 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/2.log" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.184235 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" exitCode=1 Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.184302 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.184360 4809 scope.go:117] "RemoveContainer" containerID="f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.185809 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:10:25 crc kubenswrapper[4809]: E0930 00:10:25.186168 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.208439 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.231246 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.231311 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.231328 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.231359 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.231380 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.233545 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f777bd5504881155c1d139890d3632b926ecfb2fb3b31aaf5db90fc06f0ad5ea\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:09:54Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 00:09:54.631531 6439 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:09:54.631568 6439 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:09:54.631603 6439 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 00:09:54.631625 6439 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 00:09:54.631633 6439 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:09:54.631683 6439 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:09:54.631694 6439 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 00:09:54.631707 6439 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:09:54.631701 6439 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:09:54.631720 6439 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:09:54.631728 6439 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:09:54.631739 6439 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:09:54.631753 6439 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:09:54.631795 6439 factory.go:656] Stopping watch factory\\\\nI0930 00:09:54.631810 6439 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:24Z\\\",\\\"message\\\":\\\" event handler 2 for removal\\\\nI0930 00:10:24.623302 6788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:10:24.623350 6788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:10:24.623366 6788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:10:24.623399 6788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:10:24.623419 6788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:10:24.623430 6788 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:10:24.623451 6788 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:10:24.623458 6788 factory.go:656] Stopping watch factory\\\\nI0930 00:10:24.623462 6788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:10:24.623477 6788 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:10:24.623479 6788 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:10:24.623486 6788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:10:24.623518 6788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:10:24.623544 6788 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 00:10:24.623638 6788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nF0930 00:10:24.623695 6788 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.267684 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f55
9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.284544 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.298431 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.313476 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.329794 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17
7225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.340359 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.340782 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.340795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.340813 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.340824 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.346422 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.360192 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.371224 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.384799 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.398199 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and 
discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.410922 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.423857 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.443313 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.443385 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.443396 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.443410 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.443418 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.445982 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.460939 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.479432 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.489428 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:25Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.546708 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.546754 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.546766 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.546786 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.546798 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.649628 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.649761 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.649787 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.649818 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.649843 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.690121 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.690162 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.690229 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.690130 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:25 crc kubenswrapper[4809]: E0930 00:10:25.690332 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:25 crc kubenswrapper[4809]: E0930 00:10:25.690468 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:25 crc kubenswrapper[4809]: E0930 00:10:25.690546 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:25 crc kubenswrapper[4809]: E0930 00:10:25.690592 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.752871 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.752910 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.752918 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.752933 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.752941 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.856417 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.856485 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.856503 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.856528 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.856546 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.959923 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.959993 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.960014 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.960045 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:25 crc kubenswrapper[4809]: I0930 00:10:25.960069 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:25Z","lastTransitionTime":"2025-09-30T00:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.063582 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.063702 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.063727 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.063758 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.063778 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.167001 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.167117 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.167146 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.167180 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.167198 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.191272 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/3.log" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.197937 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:10:26 crc kubenswrapper[4809]: E0930 00:10:26.198215 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.218352 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 
configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.239432 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.255224 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.272560 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.272618 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.272636 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.272701 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.272724 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.280185 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.294856 4809 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.328247 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.361784 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.374932 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.374988 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.375000 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.375017 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.375030 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.382529 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.399274 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.423401 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaa
a1a049c87e0a12c8d5679a6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:24Z\\\",\\\"message\\\":\\\" event handler 2 for removal\\\\nI0930 00:10:24.623302 6788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:10:24.623350 6788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:10:24.623366 6788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:10:24.623399 6788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:10:24.623419 6788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:10:24.623430 6788 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:10:24.623451 6788 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:10:24.623458 6788 factory.go:656] Stopping watch factory\\\\nI0930 00:10:24.623462 6788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:10:24.623477 6788 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:10:24.623479 6788 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:10:24.623486 6788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:10:24.623518 6788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:10:24.623544 6788 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 00:10:24.623638 6788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nF0930 00:10:24.623695 6788 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:10:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.449678 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.463193 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.478734 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.478958 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.478998 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.479046 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.479071 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.479083 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.493566 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.504493 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:
45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.520494 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sche
duler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.543519 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f
957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.559802 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:26Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.581832 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.581860 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.581870 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.581885 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.581895 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.684930 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.684976 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.684995 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.685017 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.685034 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.787784 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.787831 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.787849 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.787871 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.787890 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.890635 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.890749 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.890777 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.890809 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.890827 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.994292 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.994423 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.994447 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.994478 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:26 crc kubenswrapper[4809]: I0930 00:10:26.994501 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:26Z","lastTransitionTime":"2025-09-30T00:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.096906 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.096965 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.096984 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.097009 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.097027 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.199749 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.199804 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.199823 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.199851 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.199870 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.303538 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.303617 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.303635 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.303701 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.303720 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.407775 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.407857 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.407881 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.407911 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.407936 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.486392 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.486457 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.486483 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.486512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.486530 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.507693 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:27Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.512999 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.513117 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.513178 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.513208 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.513228 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.534300 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:27Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.540616 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.540725 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.540748 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.540785 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.540824 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.562818 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:27Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.567750 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.567799 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.567822 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.567884 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.567908 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.589451 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:27Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.594986 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.595066 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.595084 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.595115 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.595132 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.616554 4809 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d9b712b9-d1e8-41f4-ba32-d2073fd76ca6\\\",\\\"systemUUID\\\":\\\"73ed9907-d828-4256-9134-2bd904afec40\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:27Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.616815 4809 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.619527 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.619583 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.619606 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.619670 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.619699 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.690624 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.690806 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.690750 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.690910 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.691151 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.691318 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.691461 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:27 crc kubenswrapper[4809]: E0930 00:10:27.691605 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.727091 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.727283 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.727360 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.727391 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.727413 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.830278 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.830335 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.830352 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.830374 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.830387 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.933397 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.933886 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.933926 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.933974 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:27 crc kubenswrapper[4809]: I0930 00:10:27.934001 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:27Z","lastTransitionTime":"2025-09-30T00:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.037223 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.037297 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.037321 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.037347 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.037365 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.141284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.141355 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.141373 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.141398 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.141415 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.244610 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.244731 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.244767 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.244795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.244818 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.348861 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.348932 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.348950 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.348975 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.348996 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.452501 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.452574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.452597 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.452628 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.452682 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.556213 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.556284 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.556311 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.556339 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.556361 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.659520 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.659663 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.659702 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.659745 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.659767 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.763074 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.763125 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.763141 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.763167 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.763185 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.866369 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.866404 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.866416 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.866431 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.866442 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.970146 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.970182 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.970191 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.970209 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:28 crc kubenswrapper[4809]: I0930 00:10:28.970218 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:28Z","lastTransitionTime":"2025-09-30T00:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.073860 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.073894 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.073908 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.073925 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.073936 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.177054 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.177100 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.177112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.177128 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.177140 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.279930 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.280014 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.280034 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.280060 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.280078 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.385309 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.385411 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.385439 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.385470 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.385491 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.488569 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.488635 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.488682 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.489423 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.489508 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.592907 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.593020 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.593047 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.593082 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.593105 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.690414 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.690488 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:29 crc kubenswrapper[4809]: E0930 00:10:29.691683 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.691726 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:29 crc kubenswrapper[4809]: E0930 00:10:29.691859 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.691925 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:29 crc kubenswrapper[4809]: E0930 00:10:29.692520 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:29 crc kubenswrapper[4809]: E0930 00:10:29.692773 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.695725 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.695779 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.695831 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.695853 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.695869 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.708956 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"586fda5f-739d-46b1-9eb2-95315601e9cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3999f7d4d827437397559590784660d5576bc86198de79b0229e58ac4614792f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://adc242dd95a1a3c912b700b13134ad9db267a1a18a4d16dcb04650c7881c2554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://593dbe33082f373c6000799151dbb8742c3f50bf3c9c46acd264a82894bdf00c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://800e1a00c10b540d8961228a05fba7b8f6b360e81246662e52e242ebc7e5d20b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.731768 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaa
a1a049c87e0a12c8d5679a6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:24Z\\\",\\\"message\\\":\\\" event handler 2 for removal\\\\nI0930 00:10:24.623302 6788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 00:10:24.623350 6788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 00:10:24.623366 6788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 00:10:24.623399 6788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 00:10:24.623419 6788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 00:10:24.623430 6788 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 00:10:24.623451 6788 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 00:10:24.623458 6788 factory.go:656] Stopping watch factory\\\\nI0930 00:10:24.623462 6788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 00:10:24.623477 6788 ovnkube.go:599] Stopped ovnkube\\\\nI0930 00:10:24.623479 6788 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 00:10:24.623486 6788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 00:10:24.623518 6788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 00:10:24.623544 6788 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 00:10:24.623638 6788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nF0930 00:10:24.623695 6788 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:10:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdrpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lkdqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.747436 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.761344 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b428d5ff3d095fc37137e5230c53a26a8f45da26d92213085c2145d6cd8ad39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cc4b12bfdc63ee17987c91cd1effef7a8a9a760714d2b499c991e691d26f7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.773630 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"526944fa-517b-47ad-abf1-75683c7f70a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3aee337b41a968c4594882bc1e719fdc27bc5c413b5ddc917bb524becc27ba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mn7xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2zlhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.788264 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"830a7e52-5a85-4555-8bcb-57828953b475\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e6afe1ae0b09081f5403143b4305f74ea3cac0f0ced46daef33d5601a15e7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5573285e4e2c628192a641224044c3a187785a35ff8f698318b1b2bd727d4dbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nz87s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qdg49\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.798369 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.798407 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.798418 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.798457 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.798471 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.801009 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a002504f-4eff-4d00-b908-f7628cbcd59e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77001c72a4c411736ea00880004e17f60ba4384f43da67cdd6a20fb9352add2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9432f80d6c9f58d48e1598fe727a0c255f16396ff5895cc9a7f9c95774e1c47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41030661a6009fd19ff2d46ec204a32d61d9b063c20f55e183b961611cd93eb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b602e4c0621a5285e8c596f35a817f599b5eda5bfdf69abb8cce7407e4167ae9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.841933 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed07bd38-858f-4029-a4fc-37e0377585b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://497040fcf05dad09a69ae550cc93c43c3ca6c2331a9bacd8fdecc534bdd87bc7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01d
22935bb045ca55163238412a3e345eeaa4c4c20cb4b72a14ac21d217f559c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70e478ffac3cf737b187eb52bf3a8d157688d76d782fc4cdab3a9194b7a13118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d16dde25027e4cfb76277a0e72645a7812ee30f957efb0b24cf850ae669a262b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cc8a9ff2d7f58c23b0f3a6039aa6ab32f00382eaba6fe612a4c3058cbf1301d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5a4d31a3a942472af7d6dc94e1e841ed62c47be21d48b3d5c18fe21415c17c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c24b41e025336d865d845bab32aa8ee768b5d34b4525aee09d14ca79f996efa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2781676d4d0e51af9c3fac83a1769bf30f3ded6956191897ee9011def5377f75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.854786 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.865294 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43a82899-34fa-4d03-9856-7303839926c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwr5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4ktzq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.882429 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4ee2af06-5588-4d44-bf31-68d9fcd3a537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc52f484858b84698f1f380a3110dec81c450c2fe4108062a464f0cb9bcd2ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://744a9f941fbfc90d5581c989b289a934f3257459eadb01f9b04d19b24e469c80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cdf787586b77b620e845e661dd18771ef6e81408f956c157017bc6f088de1b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26d9171f78f77e894d1ec968ed7a51f3bd6761bf1eb8ace2ce322c945c48f49\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1116e10d116915720cf5eb3d91bf0d08eddafe6e5052cdfe5b6a47b28765c864\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\".557363 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 00:09:29.558371 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3222933459/tls.crt::/tmp/serving-cert-3222933459/tls.key\\\\\\\"\\\\nI0930 00:09:29.838940 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0930 00:09:29.844993 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0930 00:09:29.845050 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0930 00:09:29.845082 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0930 00:09:29.845088 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0930 00:09:29.852991 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0930 00:09:29.853043 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0930 00:09:29.853052 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0930 00:09:29.853055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0930 00:09:29.853058 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0930 00:09:29.853061 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0930 00:09:29.853216 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0930 00:09:29.857439 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0930 00:09:29.858871 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4d7d5a842828c0e62bb17210a3b494b1f914f0fac2a913e56d5438c1c84163e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://918f617c28ebf2fa61159b9fafc77ad53e3288fb8bc76fe42f75c49e9d4cfceb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.896329 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f341ee5a4eb03ce12f3cb86672f1533e98a33ac919b9f07a2da1f7529cad355\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.905347 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.905393 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.905402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.905418 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.905429 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:29Z","lastTransitionTime":"2025-09-30T00:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.909929 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.923474 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-pkt8x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5bd29be-6d83-4a63-bf31-88e891913ba3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6c60c0e954dba5f8646e87ebbf88b2a96ec8ecf745755b8c8c7525720a55968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g7qdw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-pkt8x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.938137 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff17573921ae768c7d90324b22cd0a6e3e9f18a5fe240ddf7ada2866fe71611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.949695 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-v5x64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae2cb8c5-974c-4952-920a-9f194e953d8b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://043693c4154fc8744557d1487ba75fb5dbb733796a898c021f7145484208363d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kj5gf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-v5x64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.965299 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h6xqr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc7b2e1-7308-483a-9117-02e83c45a528\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T00:10:18Z\\\",\\\"message\\\":\\\"2025-09-30T00:09:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400\\\\n2025-09-30T00:09:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e2673248-8c83-4306-bd00-85b169ca8400 to /host/opt/cni/bin/\\\\n2025-09-30T00:09:33Z [verbose] multus-daemon started\\\\n2025-09-30T00:09:33Z [verbose] Readiness Indicator file check\\\\n2025-09-30T00:10:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75c7d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h6xqr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:29 crc kubenswrapper[4809]: I0930 00:10:29.981118 4809 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5a18711-80f7-42d7-a6a9-04996c22c1f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T00:09:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8ef545eb01bd8c9c6b3d3ab81737e134323b5f7794cf41fbebc9bfbea16c36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T00:09:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b73149a02d617fc862257ad18758c56a3b434cd87ab12b01e10af727e1a2e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0df7c9eb78068337aed1be3c9faeab00a5928d00cf241d8037956c7cb9be64a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745a03efce4fb38d7dff42fd1001d82032bd0dcdc52dd2d133e4950305d5f361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4675672247e9817718ca1657d5943ae99739901239c6b996131792bf7a8e6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9c920c1155bdede26d97a161699ac263243a5d345f959cf047e0a57faa2cb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46a71dbcedd211bd456904025aecab559e0b28ea313cc30455a82767cb90c545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T00:09:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T00:09:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8bb6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T00:09:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p8k9l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T00:10:29Z is after 2025-08-24T17:21:41Z" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.008568 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.008706 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc 
kubenswrapper[4809]: I0930 00:10:30.008734 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.008773 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.008797 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.111759 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.111820 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.111838 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.111866 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.111885 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.214823 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.214907 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.214932 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.214964 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.214991 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.318195 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.318264 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.318283 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.318317 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.318336 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.422583 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.422678 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.422706 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.422737 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.422755 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.526876 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.526915 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.526932 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.526954 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.526972 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.633590 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.633670 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.633689 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.633723 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.633739 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.736764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.736798 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.736808 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.736824 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.736835 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.840707 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.840764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.840786 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.840814 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.840837 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.943439 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.943523 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.943544 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.943569 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:30 crc kubenswrapper[4809]: I0930 00:10:30.943586 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:30Z","lastTransitionTime":"2025-09-30T00:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.047604 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.047713 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.047741 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.047794 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.047824 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.151512 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.151574 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.151595 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.151623 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.151672 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.254060 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.254119 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.254139 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.254164 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.254182 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.357950 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.358016 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.358039 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.358065 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.358082 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.461110 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.461189 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.461212 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.461242 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.461264 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.564945 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.565015 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.565032 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.565060 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.565079 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.668532 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.668619 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.668677 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.668714 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.668737 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.690135 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.690240 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.690291 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:31 crc kubenswrapper[4809]: E0930 00:10:31.690352 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.690383 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:31 crc kubenswrapper[4809]: E0930 00:10:31.690527 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:31 crc kubenswrapper[4809]: E0930 00:10:31.690743 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:31 crc kubenswrapper[4809]: E0930 00:10:31.690897 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.772066 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.772125 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.772142 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.772165 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.772181 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.875243 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.875295 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.875303 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.875319 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.875329 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.978714 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.978768 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.978784 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.978809 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:31 crc kubenswrapper[4809]: I0930 00:10:31.978826 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:31Z","lastTransitionTime":"2025-09-30T00:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.082322 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.082431 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.082456 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.082488 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.082511 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.185712 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.185777 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.185795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.185818 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.185839 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.323305 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.323378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.323402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.323433 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.323456 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.426507 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.426570 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.426588 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.426613 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.426632 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.529842 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.529927 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.529955 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.529989 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.530012 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.633978 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.634027 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.634044 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.634068 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.634085 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.737424 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.737485 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.737503 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.737528 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.737546 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.841272 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.841325 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.841341 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.841365 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.841383 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.944169 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.944214 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.944227 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.944245 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:32 crc kubenswrapper[4809]: I0930 00:10:32.944258 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:32Z","lastTransitionTime":"2025-09-30T00:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.046539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.046603 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.046625 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.046690 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.046713 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.150055 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.150121 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.150139 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.150163 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.150180 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.252762 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.252840 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.252863 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.252888 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.252909 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.355795 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.355879 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.355893 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.355911 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.355922 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.460202 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.460278 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.460295 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.460364 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.460382 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.468936 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.469040 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.469114 4809 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.469174 4809 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.469224 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.469191911 +0000 UTC m=+148.505441359 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.469256 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.469242843 +0000 UTC m=+148.505492291 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.563121 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.563196 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.563215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.563239 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.563256 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.569689 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.569866 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.569887 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.569857115 +0000 UTC m=+148.606106563 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.569994 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570100 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570132 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570151 4809 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570173 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570207 4809 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570220 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.570201265 +0000 UTC m=+148.606450703 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570231 4809 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.570300 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.570275037 +0000 UTC m=+148.606524495 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.666083 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.666150 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.666168 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.666192 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.666210 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.690077 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.690157 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.690187 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.690272 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.690446 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.690692 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.690735 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:33 crc kubenswrapper[4809]: E0930 00:10:33.691083 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.769530 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.769601 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.769697 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.769733 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.769757 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.873138 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.873198 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.873219 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.873246 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.873268 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.976170 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.976232 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.976248 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.976272 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:33 crc kubenswrapper[4809]: I0930 00:10:33.976289 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:33Z","lastTransitionTime":"2025-09-30T00:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.080449 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.080508 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.080525 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.080550 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.080570 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.183451 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.183497 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.183507 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.183523 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.183540 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.286169 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.286215 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.286231 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.286249 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.286261 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.389015 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.389066 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.389077 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.389096 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.389108 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.491732 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.491796 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.491811 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.491827 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.491839 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.594578 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.594676 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.594700 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.594729 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.594748 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.697318 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.697357 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.697366 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.697378 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.697386 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.800780 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.800837 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.800853 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.800878 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.800895 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.904357 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.904422 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.904439 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.904463 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:34 crc kubenswrapper[4809]: I0930 00:10:34.904480 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:34Z","lastTransitionTime":"2025-09-30T00:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.007586 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.008056 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.008083 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.008112 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.008134 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.110089 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.110159 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.110171 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.110185 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.110194 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.213173 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.213235 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.213253 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.213277 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.213295 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.314908 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.314956 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.314972 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.314993 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.315009 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.417697 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.417740 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.417750 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.417765 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.417776 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.521625 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.521686 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.521697 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.521712 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.521722 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.625043 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.625107 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.625124 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.625148 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.625164 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.690906 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.691002 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.690924 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:35 crc kubenswrapper[4809]: E0930 00:10:35.691097 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.691170 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:35 crc kubenswrapper[4809]: E0930 00:10:35.691328 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:35 crc kubenswrapper[4809]: E0930 00:10:35.691391 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:35 crc kubenswrapper[4809]: E0930 00:10:35.691471 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.728028 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.728090 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.728108 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.728136 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.728154 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.833248 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.833304 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.833383 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.833411 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.833429 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.939802 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.939878 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.939900 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.939930 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:35 crc kubenswrapper[4809]: I0930 00:10:35.939955 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:35Z","lastTransitionTime":"2025-09-30T00:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.044493 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.044529 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.044541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.044556 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.044569 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.148404 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.148431 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.148439 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.148452 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.148460 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.251019 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.251286 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.251387 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.251459 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.251525 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.353720 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.353779 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.353790 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.353810 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.353824 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.456788 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.456818 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.456828 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.456843 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.456852 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.559624 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.559723 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.559741 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.559764 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.559781 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.663674 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.663735 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.663753 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.663778 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.663795 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.766469 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.766539 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.766561 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.766612 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.766635 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.870396 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.870464 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.870508 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.870541 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.870563 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.973885 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.973968 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.973993 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.974026 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:36 crc kubenswrapper[4809]: I0930 00:10:36.974048 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:36Z","lastTransitionTime":"2025-09-30T00:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.077736 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.077801 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.077825 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.077851 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.077870 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.181148 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.181219 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.181237 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.181263 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.181282 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.284265 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.284343 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.284368 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.284402 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.284423 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.388618 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.388708 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.388730 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.388765 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.388784 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.491855 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.491914 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.491934 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.491956 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.491968 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.594943 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.595002 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.595023 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.595045 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.595060 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.658849 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.658951 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.658968 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.658999 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.659022 4809 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T00:10:37Z","lastTransitionTime":"2025-09-30T00:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.689981 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:37 crc kubenswrapper[4809]: E0930 00:10:37.690212 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.690971 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:37 crc kubenswrapper[4809]: E0930 00:10:37.691283 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.691432 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:37 crc kubenswrapper[4809]: E0930 00:10:37.691612 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.691865 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:37 crc kubenswrapper[4809]: E0930 00:10:37.691952 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.737168 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt"] Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.737794 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.740425 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.740856 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.741689 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.742585 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.789425 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=67.789389696 podStartE2EDuration="1m7.789389696s" podCreationTimestamp="2025-09-30 00:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.771316832 +0000 UTC m=+88.807566290" watchObservedRunningTime="2025-09-30 00:10:37.789389696 +0000 UTC m=+88.825639144" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.820206 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dab9d14d-c115-49df-8006-ed72e1f7e226-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.820266 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dab9d14d-c115-49df-8006-ed72e1f7e226-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.820301 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dab9d14d-c115-49df-8006-ed72e1f7e226-service-ca\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.820384 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dab9d14d-c115-49df-8006-ed72e1f7e226-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.820428 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dab9d14d-c115-49df-8006-ed72e1f7e226-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" 
(UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.866423 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-v5x64" podStartSLOduration=66.866381993 podStartE2EDuration="1m6.866381993s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.851589423 +0000 UTC m=+88.887838841" watchObservedRunningTime="2025-09-30 00:10:37.866381993 +0000 UTC m=+88.902631411" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.887067 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-h6xqr" podStartSLOduration=66.887045973 podStartE2EDuration="1m6.887045973s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.866858777 +0000 UTC m=+88.903108195" watchObservedRunningTime="2025-09-30 00:10:37.887045973 +0000 UTC m=+88.923295391" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.905807 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-p8k9l" podStartSLOduration=66.905775907 podStartE2EDuration="1m6.905775907s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.887544727 +0000 UTC m=+88.923794145" watchObservedRunningTime="2025-09-30 00:10:37.905775907 +0000 UTC m=+88.942025355" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921558 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dab9d14d-c115-49df-8006-ed72e1f7e226-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921615 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dab9d14d-c115-49df-8006-ed72e1f7e226-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921667 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dab9d14d-c115-49df-8006-ed72e1f7e226-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921700 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dab9d14d-c115-49df-8006-ed72e1f7e226-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921730 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dab9d14d-c115-49df-8006-ed72e1f7e226-service-ca\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921771 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dab9d14d-c115-49df-8006-ed72e1f7e226-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.921793 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dab9d14d-c115-49df-8006-ed72e1f7e226-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.922766 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dab9d14d-c115-49df-8006-ed72e1f7e226-service-ca\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.928173 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-pkt8x" podStartSLOduration=66.928149007 podStartE2EDuration="1m6.928149007s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.906384275 +0000 UTC m=+88.942633743" watchObservedRunningTime="2025-09-30 00:10:37.928149007 +0000 UTC m=+88.964398425" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.930886 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dab9d14d-c115-49df-8006-ed72e1f7e226-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.940707 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dab9d14d-c115-49df-8006-ed72e1f7e226-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-vkrlt\" (UID: \"dab9d14d-c115-49df-8006-ed72e1f7e226\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.971595 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=66.971572578 podStartE2EDuration="1m6.971572578s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.928811576 +0000 UTC m=+88.965060994" watchObservedRunningTime="2025-09-30 00:10:37.971572578 +0000 UTC m=+89.007822006" Sep 30 00:10:37 crc kubenswrapper[4809]: I0930 00:10:37.989204 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podStartSLOduration=66.989179469 podStartE2EDuration="1m6.989179469s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:37.988294754 +0000 UTC m=+89.024544182" watchObservedRunningTime="2025-09-30 00:10:37.989179469 +0000 UTC m=+89.025428897" Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.005760 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qdg49" podStartSLOduration=67.00573398 podStartE2EDuration="1m7.00573398s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:38.005073491 +0000 UTC m=+89.041322909" watchObservedRunningTime="2025-09-30 00:10:38.00573398 +0000 UTC m=+89.041983398" Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.030084 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=36.030063877 podStartE2EDuration="36.030063877s" podCreationTimestamp="2025-09-30 00:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:38.029944644 +0000 UTC m=+89.066194092" watchObservedRunningTime="2025-09-30 00:10:38.030063877 +0000 UTC m=+89.066313305" Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.059720 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=69.059692738 podStartE2EDuration="1m9.059692738s" podCreationTimestamp="2025-09-30 00:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:38.058769361 +0000 UTC m=+89.095018779" watchObservedRunningTime="2025-09-30 00:10:38.059692738 +0000 UTC m=+89.095942186" Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.060897 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" Sep 30 00:10:38 crc kubenswrapper[4809]: W0930 00:10:38.077887 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddab9d14d_c115_49df_8006_ed72e1f7e226.slice/crio-4c6ea3a06d5b92be3f510ee9e40f394d1536a834725bd99577b1d20025cad7f1 WatchSource:0}: Error finding container 4c6ea3a06d5b92be3f510ee9e40f394d1536a834725bd99577b1d20025cad7f1: Status 404 returned error can't find the container with id 4c6ea3a06d5b92be3f510ee9e40f394d1536a834725bd99577b1d20025cad7f1 Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.242218 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" event={"ID":"dab9d14d-c115-49df-8006-ed72e1f7e226","Type":"ContainerStarted","Data":"c699e4f66d1e4b6a4a73c923f3d8b11f1e090ab1bb6203f4e93f5ecb8531154b"} Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.242281 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" event={"ID":"dab9d14d-c115-49df-8006-ed72e1f7e226","Type":"ContainerStarted","Data":"4c6ea3a06d5b92be3f510ee9e40f394d1536a834725bd99577b1d20025cad7f1"} Sep 30 00:10:38 crc kubenswrapper[4809]: I0930 00:10:38.257704 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vkrlt" podStartSLOduration=67.257685138 podStartE2EDuration="1m7.257685138s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:38.256899106 +0000 UTC m=+89.293148554" watchObservedRunningTime="2025-09-30 00:10:38.257685138 +0000 UTC m=+89.293934556" Sep 30 00:10:39 crc kubenswrapper[4809]: I0930 00:10:39.690069 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:39 crc kubenswrapper[4809]: E0930 00:10:39.691916 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:39 crc kubenswrapper[4809]: I0930 00:10:39.691946 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:39 crc kubenswrapper[4809]: I0930 00:10:39.691983 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:39 crc kubenswrapper[4809]: I0930 00:10:39.692172 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:39 crc kubenswrapper[4809]: E0930 00:10:39.692359 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:39 crc kubenswrapper[4809]: E0930 00:10:39.692509 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:39 crc kubenswrapper[4809]: E0930 00:10:39.693284 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:39 crc kubenswrapper[4809]: I0930 00:10:39.693766 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:10:39 crc kubenswrapper[4809]: E0930 00:10:39.694035 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:10:41 crc kubenswrapper[4809]: I0930 00:10:41.689982 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:41 crc kubenswrapper[4809]: I0930 00:10:41.690053 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:41 crc kubenswrapper[4809]: I0930 00:10:41.690365 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:41 crc kubenswrapper[4809]: I0930 00:10:41.690437 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:41 crc kubenswrapper[4809]: E0930 00:10:41.690551 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:41 crc kubenswrapper[4809]: E0930 00:10:41.690714 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:41 crc kubenswrapper[4809]: E0930 00:10:41.691031 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:41 crc kubenswrapper[4809]: E0930 00:10:41.691181 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:41 crc kubenswrapper[4809]: I0930 00:10:41.710149 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 30 00:10:43 crc kubenswrapper[4809]: I0930 00:10:43.690910 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:43 crc kubenswrapper[4809]: I0930 00:10:43.690953 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:43 crc kubenswrapper[4809]: I0930 00:10:43.690953 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:43 crc kubenswrapper[4809]: I0930 00:10:43.691031 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:43 crc kubenswrapper[4809]: E0930 00:10:43.691154 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:43 crc kubenswrapper[4809]: E0930 00:10:43.691481 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:43 crc kubenswrapper[4809]: E0930 00:10:43.691576 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:43 crc kubenswrapper[4809]: E0930 00:10:43.691909 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:45 crc kubenswrapper[4809]: I0930 00:10:45.690863 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:45 crc kubenswrapper[4809]: I0930 00:10:45.690907 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:45 crc kubenswrapper[4809]: E0930 00:10:45.691360 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:45 crc kubenswrapper[4809]: I0930 00:10:45.690966 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:45 crc kubenswrapper[4809]: E0930 00:10:45.691408 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:45 crc kubenswrapper[4809]: I0930 00:10:45.690941 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:45 crc kubenswrapper[4809]: E0930 00:10:45.691449 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:45 crc kubenswrapper[4809]: E0930 00:10:45.691473 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:47 crc kubenswrapper[4809]: I0930 00:10:47.690594 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:47 crc kubenswrapper[4809]: I0930 00:10:47.690681 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:47 crc kubenswrapper[4809]: I0930 00:10:47.690614 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:47 crc kubenswrapper[4809]: I0930 00:10:47.690886 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:47 crc kubenswrapper[4809]: E0930 00:10:47.691166 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:47 crc kubenswrapper[4809]: E0930 00:10:47.691260 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:47 crc kubenswrapper[4809]: E0930 00:10:47.690816 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:47 crc kubenswrapper[4809]: E0930 00:10:47.691434 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:49 crc kubenswrapper[4809]: I0930 00:10:49.690424 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:49 crc kubenswrapper[4809]: I0930 00:10:49.690496 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:49 crc kubenswrapper[4809]: I0930 00:10:49.690370 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:49 crc kubenswrapper[4809]: E0930 00:10:49.692124 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:49 crc kubenswrapper[4809]: I0930 00:10:49.692165 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:49 crc kubenswrapper[4809]: E0930 00:10:49.692253 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:49 crc kubenswrapper[4809]: E0930 00:10:49.692362 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:49 crc kubenswrapper[4809]: E0930 00:10:49.692522 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:49 crc kubenswrapper[4809]: I0930 00:10:49.712027 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=8.71200609 podStartE2EDuration="8.71200609s" podCreationTimestamp="2025-09-30 00:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:49.711222957 +0000 UTC m=+100.747472395" watchObservedRunningTime="2025-09-30 00:10:49.71200609 +0000 UTC m=+100.748255538" Sep 30 00:10:50 crc kubenswrapper[4809]: I0930 00:10:50.266253 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:50 crc kubenswrapper[4809]: E0930 00:10:50.266444 4809 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:10:50 crc kubenswrapper[4809]: E0930 00:10:50.266516 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs podName:43a82899-34fa-4d03-9856-7303839926c2 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:54.266492325 +0000 UTC m=+165.302741763 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs") pod "network-metrics-daemon-4ktzq" (UID: "43a82899-34fa-4d03-9856-7303839926c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 00:10:51 crc kubenswrapper[4809]: I0930 00:10:51.690815 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:51 crc kubenswrapper[4809]: I0930 00:10:51.690851 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:51 crc kubenswrapper[4809]: I0930 00:10:51.690896 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:51 crc kubenswrapper[4809]: I0930 00:10:51.690815 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:51 crc kubenswrapper[4809]: E0930 00:10:51.691029 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:51 crc kubenswrapper[4809]: E0930 00:10:51.691159 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:51 crc kubenswrapper[4809]: E0930 00:10:51.691330 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:51 crc kubenswrapper[4809]: E0930 00:10:51.691432 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:53 crc kubenswrapper[4809]: I0930 00:10:53.690710 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:53 crc kubenswrapper[4809]: I0930 00:10:53.690769 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:53 crc kubenswrapper[4809]: I0930 00:10:53.690724 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:53 crc kubenswrapper[4809]: I0930 00:10:53.690868 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:53 crc kubenswrapper[4809]: E0930 00:10:53.690979 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:53 crc kubenswrapper[4809]: E0930 00:10:53.691435 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:53 crc kubenswrapper[4809]: E0930 00:10:53.691484 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:53 crc kubenswrapper[4809]: E0930 00:10:53.691802 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:54 crc kubenswrapper[4809]: I0930 00:10:54.691563 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:10:54 crc kubenswrapper[4809]: E0930 00:10:54.691863 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lkdqg_openshift-ovn-kubernetes(5ec79e76-2a92-48e7-a55f-f8e630b00ed5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" Sep 30 00:10:55 crc kubenswrapper[4809]: I0930 00:10:55.689922 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:55 crc kubenswrapper[4809]: I0930 00:10:55.689989 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:55 crc kubenswrapper[4809]: I0930 00:10:55.690032 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:55 crc kubenswrapper[4809]: E0930 00:10:55.690062 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:55 crc kubenswrapper[4809]: E0930 00:10:55.690246 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:55 crc kubenswrapper[4809]: I0930 00:10:55.690369 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:55 crc kubenswrapper[4809]: E0930 00:10:55.690505 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:55 crc kubenswrapper[4809]: E0930 00:10:55.690379 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:57 crc kubenswrapper[4809]: I0930 00:10:57.689994 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:57 crc kubenswrapper[4809]: I0930 00:10:57.690124 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:57 crc kubenswrapper[4809]: I0930 00:10:57.690015 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:57 crc kubenswrapper[4809]: I0930 00:10:57.690259 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:57 crc kubenswrapper[4809]: E0930 00:10:57.690244 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:57 crc kubenswrapper[4809]: E0930 00:10:57.690471 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:57 crc kubenswrapper[4809]: E0930 00:10:57.690576 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:10:57 crc kubenswrapper[4809]: E0930 00:10:57.690713 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:59 crc kubenswrapper[4809]: I0930 00:10:59.690108 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:10:59 crc kubenswrapper[4809]: I0930 00:10:59.690346 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:10:59 crc kubenswrapper[4809]: I0930 00:10:59.690254 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:10:59 crc kubenswrapper[4809]: I0930 00:10:59.690141 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:10:59 crc kubenswrapper[4809]: E0930 00:10:59.692424 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:10:59 crc kubenswrapper[4809]: E0930 00:10:59.692834 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:10:59 crc kubenswrapper[4809]: E0930 00:10:59.693023 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:10:59 crc kubenswrapper[4809]: E0930 00:10:59.693088 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:01 crc kubenswrapper[4809]: I0930 00:11:01.690568 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:01 crc kubenswrapper[4809]: I0930 00:11:01.690601 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:01 crc kubenswrapper[4809]: E0930 00:11:01.690981 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:01 crc kubenswrapper[4809]: I0930 00:11:01.691037 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:01 crc kubenswrapper[4809]: E0930 00:11:01.691220 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:01 crc kubenswrapper[4809]: E0930 00:11:01.691377 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:01 crc kubenswrapper[4809]: I0930 00:11:01.691429 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:01 crc kubenswrapper[4809]: E0930 00:11:01.691578 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:03 crc kubenswrapper[4809]: I0930 00:11:03.690627 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:03 crc kubenswrapper[4809]: I0930 00:11:03.690693 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:03 crc kubenswrapper[4809]: I0930 00:11:03.690756 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:03 crc kubenswrapper[4809]: E0930 00:11:03.690867 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:03 crc kubenswrapper[4809]: I0930 00:11:03.690941 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:03 crc kubenswrapper[4809]: E0930 00:11:03.691061 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:03 crc kubenswrapper[4809]: E0930 00:11:03.691157 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:03 crc kubenswrapper[4809]: E0930 00:11:03.691204 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.339361 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/1.log" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.340455 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/0.log" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.340529 4809 generic.go:334] "Generic (PLEG): container finished" podID="efc7b2e1-7308-483a-9117-02e83c45a528" containerID="2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885" exitCode=1 Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.340582 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerDied","Data":"2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885"} Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.340674 4809 scope.go:117] "RemoveContainer" containerID="a3dfa1f4d5282beb0e0f47701288b7396c0d3bbdbc9e7bb436620393f720bca3" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.341289 4809 scope.go:117] "RemoveContainer" containerID="2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885" Sep 30 00:11:05 crc kubenswrapper[4809]: E0930 00:11:05.341574 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-h6xqr_openshift-multus(efc7b2e1-7308-483a-9117-02e83c45a528)\"" pod="openshift-multus/multus-h6xqr" podUID="efc7b2e1-7308-483a-9117-02e83c45a528" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.690495 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.690556 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.690626 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:05 crc kubenswrapper[4809]: E0930 00:11:05.690615 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:05 crc kubenswrapper[4809]: I0930 00:11:05.690850 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:05 crc kubenswrapper[4809]: E0930 00:11:05.690849 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:05 crc kubenswrapper[4809]: E0930 00:11:05.690908 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:05 crc kubenswrapper[4809]: E0930 00:11:05.690965 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:06 crc kubenswrapper[4809]: I0930 00:11:06.346250 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/1.log" Sep 30 00:11:07 crc kubenswrapper[4809]: I0930 00:11:07.690857 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:07 crc kubenswrapper[4809]: I0930 00:11:07.691401 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:07 crc kubenswrapper[4809]: I0930 00:11:07.691443 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:07 crc kubenswrapper[4809]: E0930 00:11:07.691411 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:07 crc kubenswrapper[4809]: I0930 00:11:07.691961 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:07 crc kubenswrapper[4809]: E0930 00:11:07.692063 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:07 crc kubenswrapper[4809]: E0930 00:11:07.692131 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:07 crc kubenswrapper[4809]: E0930 00:11:07.692974 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:07 crc kubenswrapper[4809]: I0930 00:11:07.693383 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:11:08 crc kubenswrapper[4809]: I0930 00:11:08.354245 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/3.log" Sep 30 00:11:08 crc kubenswrapper[4809]: I0930 00:11:08.356617 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerStarted","Data":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} Sep 30 00:11:08 crc kubenswrapper[4809]: I0930 00:11:08.357129 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:11:08 crc kubenswrapper[4809]: I0930 00:11:08.690014 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podStartSLOduration=97.689989985 podStartE2EDuration="1m37.689989985s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:08.396202531 +0000 UTC m=+119.432451949" watchObservedRunningTime="2025-09-30 00:11:08.689989985 +0000 UTC m=+119.726239403" Sep 30 00:11:08 crc kubenswrapper[4809]: I0930 00:11:08.691119 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4ktzq"] Sep 30 00:11:08 crc kubenswrapper[4809]: I0930 00:11:08.691292 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:08 crc kubenswrapper[4809]: E0930 00:11:08.691426 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:09 crc kubenswrapper[4809]: E0930 00:11:09.653174 4809 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 30 00:11:09 crc kubenswrapper[4809]: I0930 00:11:09.689974 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:09 crc kubenswrapper[4809]: I0930 00:11:09.689984 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:09 crc kubenswrapper[4809]: I0930 00:11:09.690035 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:09 crc kubenswrapper[4809]: E0930 00:11:09.691058 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:09 crc kubenswrapper[4809]: E0930 00:11:09.691263 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:09 crc kubenswrapper[4809]: E0930 00:11:09.691375 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:09 crc kubenswrapper[4809]: E0930 00:11:09.794845 4809 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:11:10 crc kubenswrapper[4809]: I0930 00:11:10.690407 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:10 crc kubenswrapper[4809]: E0930 00:11:10.690625 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:11 crc kubenswrapper[4809]: I0930 00:11:11.690490 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:11 crc kubenswrapper[4809]: I0930 00:11:11.690585 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:11 crc kubenswrapper[4809]: E0930 00:11:11.690698 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:11 crc kubenswrapper[4809]: I0930 00:11:11.690747 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:11 crc kubenswrapper[4809]: E0930 00:11:11.690882 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:11 crc kubenswrapper[4809]: E0930 00:11:11.690970 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:12 crc kubenswrapper[4809]: I0930 00:11:12.690631 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:12 crc kubenswrapper[4809]: E0930 00:11:12.690886 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:13 crc kubenswrapper[4809]: I0930 00:11:13.690342 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:13 crc kubenswrapper[4809]: I0930 00:11:13.690477 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:13 crc kubenswrapper[4809]: E0930 00:11:13.690629 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:13 crc kubenswrapper[4809]: I0930 00:11:13.690709 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:13 crc kubenswrapper[4809]: E0930 00:11:13.690913 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:13 crc kubenswrapper[4809]: E0930 00:11:13.691048 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:14 crc kubenswrapper[4809]: I0930 00:11:14.690592 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:14 crc kubenswrapper[4809]: E0930 00:11:14.690823 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:14 crc kubenswrapper[4809]: E0930 00:11:14.796405 4809 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:11:15 crc kubenswrapper[4809]: I0930 00:11:15.690820 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:15 crc kubenswrapper[4809]: I0930 00:11:15.690894 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:15 crc kubenswrapper[4809]: I0930 00:11:15.690853 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:15 crc kubenswrapper[4809]: E0930 00:11:15.691055 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:15 crc kubenswrapper[4809]: E0930 00:11:15.691175 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:15 crc kubenswrapper[4809]: E0930 00:11:15.691310 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:16 crc kubenswrapper[4809]: I0930 00:11:16.689984 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:16 crc kubenswrapper[4809]: I0930 00:11:16.690465 4809 scope.go:117] "RemoveContainer" containerID="2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885" Sep 30 00:11:16 crc kubenswrapper[4809]: E0930 00:11:16.690480 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:17 crc kubenswrapper[4809]: I0930 00:11:17.386683 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/1.log" Sep 30 00:11:17 crc kubenswrapper[4809]: I0930 00:11:17.387070 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerStarted","Data":"f5c595bedd8ad8ab824b5c4f31b23ff132f383cd0e37b5ad71bd7e639e3a350a"} Sep 30 00:11:17 crc kubenswrapper[4809]: I0930 00:11:17.690858 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:17 crc kubenswrapper[4809]: I0930 00:11:17.690957 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:17 crc kubenswrapper[4809]: E0930 00:11:17.691045 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:17 crc kubenswrapper[4809]: I0930 00:11:17.690953 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:17 crc kubenswrapper[4809]: E0930 00:11:17.691127 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:17 crc kubenswrapper[4809]: E0930 00:11:17.691242 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:18 crc kubenswrapper[4809]: I0930 00:11:18.690524 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:18 crc kubenswrapper[4809]: E0930 00:11:18.690761 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4ktzq" podUID="43a82899-34fa-4d03-9856-7303839926c2" Sep 30 00:11:19 crc kubenswrapper[4809]: I0930 00:11:19.690163 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:19 crc kubenswrapper[4809]: E0930 00:11:19.690336 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 00:11:19 crc kubenswrapper[4809]: I0930 00:11:19.690984 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:19 crc kubenswrapper[4809]: E0930 00:11:19.692538 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 00:11:19 crc kubenswrapper[4809]: I0930 00:11:19.692588 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:19 crc kubenswrapper[4809]: E0930 00:11:19.692770 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 00:11:20 crc kubenswrapper[4809]: I0930 00:11:20.690845 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:20 crc kubenswrapper[4809]: I0930 00:11:20.695070 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 30 00:11:20 crc kubenswrapper[4809]: I0930 00:11:20.695116 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.690831 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.690904 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.690998 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.694742 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.694954 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.694972 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 30 00:11:21 crc kubenswrapper[4809]: I0930 00:11:21.697058 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 30 00:11:23 crc kubenswrapper[4809]: I0930 00:11:23.611403 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.481923 4809 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.550257 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-gjq9t"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.550990 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.554902 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.557536 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.557807 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.557949 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.558268 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-962sz"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.558840 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.558937 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.560531 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.561006 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.561570 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zlcqw"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.562844 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.563382 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.564425 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.564892 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29319840-n77h7"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.565572 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.572699 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgzpz"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.573255 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.574886 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-d9lrc"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.575844 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.578225 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.579799 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.584277 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.584907 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.588561 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-62g6c"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.589461 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.590381 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4hg76"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.591413 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.599614 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.601771 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.602030 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.602519 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.602855 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.604707 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-5gnw9"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.605296 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.605697 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9sszs"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.606254 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.625890 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626912 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.627022 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626243 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626324 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626378 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626391 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626470 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626714 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.626740 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.636406 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.636586 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.636726 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.636870 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651076 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4f92806-fad5-406a-92ba-e668b4e9cede-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651139 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-config\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" 
Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651156 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651172 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651190 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-audit\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651202 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-etcd-client\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651219 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651235 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh672\" (UniqueName: \"kubernetes.io/projected/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-kube-api-access-gh672\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651253 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-serving-cert\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651274 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-config\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651290 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5211fa59-5683-47eb-bfa9-100d466cd1d5-serving-cert\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651303 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/60c02e8f-1cb7-402b-a065-2f3b207cf60b-node-pullsecrets\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651317 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-oauth-serving-cert\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651330 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651346 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-metrics-certs\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651379 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-etcd-client\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651399 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-config\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651418 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5c458d1d-1777-46d0-a001-ebb0cae0e16f-audit-dir\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651438 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5gw9\" (UniqueName: 
\"kubernetes.io/projected/5c458d1d-1777-46d0-a001-ebb0cae0e16f-kube-api-access-h5gw9\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651454 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-encryption-config\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651469 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f96w4\" (UniqueName: \"kubernetes.io/projected/4edad23e-9bba-4744-904e-a4960e17ad69-kube-api-access-f96w4\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651490 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-client-ca\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651512 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/164d8b36-2115-4fd0-8404-9aec777ad0fa-metrics-tls\") pod \"dns-operator-744455d44c-4hg76\" (UID: \"164d8b36-2115-4fd0-8404-9aec777ad0fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651534 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh6fw\" (UniqueName: \"kubernetes.io/projected/ca4b56e9-c63b-41a3-8182-90019963009f-kube-api-access-zh6fw\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651554 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5xzf\" (UniqueName: \"kubernetes.io/projected/6977dfac-797f-4284-ab89-48d3f92f332d-kube-api-access-v5xzf\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651577 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-stats-auth\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651599 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-config\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651618 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4f92806-fad5-406a-92ba-e668b4e9cede-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651634 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-config\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651670 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8339f60-b243-4c2e-b416-86ef26c104a0-serving-cert\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651691 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4b56e9-c63b-41a3-8182-90019963009f-config\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651712 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651732 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbblc\" (UniqueName: \"kubernetes.io/projected/164d8b36-2115-4fd0-8404-9aec777ad0fa-kube-api-access-rbblc\") pod \"dns-operator-744455d44c-4hg76\" (UID: \"164d8b36-2115-4fd0-8404-9aec777ad0fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651749 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-default-certificate\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651764 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651793 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651808 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aa0c5241-c63d-4e8d-9001-3cf220a3182e-auth-proxy-config\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651823 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca4b56e9-c63b-41a3-8182-90019963009f-images\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651837 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6977dfac-797f-4284-ab89-48d3f92f332d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651851 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqrzm\" (UniqueName: \"kubernetes.io/projected/88723807-e4e6-48ce-9d84-a66a57863496-kube-api-access-vqrzm\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651865 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-policies\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651880 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-audit-policies\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651893 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-service-ca-bundle\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651907 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ca4b56e9-c63b-41a3-8182-90019963009f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651922 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgh4n\" (UniqueName: \"kubernetes.io/projected/d8339f60-b243-4c2e-b416-86ef26c104a0-kube-api-access-kgh4n\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651937 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/60c02e8f-1cb7-402b-a065-2f3b207cf60b-audit-dir\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651951 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6977dfac-797f-4284-ab89-48d3f92f332d-serving-cert\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651965 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88723807-e4e6-48ce-9d84-a66a57863496-service-ca-bundle\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651979 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-oauth-config\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.651994 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hsgs\" (UniqueName: 
\"kubernetes.io/projected/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-kube-api-access-7hsgs\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652013 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652034 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4edad23e-9bba-4744-904e-a4960e17ad69-serving-cert\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652051 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-serving-cert\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652076 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-ca\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652094 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-config\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652112 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652134 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-serving-cert\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652153 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-encryption-config\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652171 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-service-ca\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652192 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-etcd-serving-ca\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652208 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-image-import-ca\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652227 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652243 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa0c5241-c63d-4e8d-9001-3cf220a3182e-config\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652256 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-trusted-ca-bundle\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652269 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652285 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx9rh\" (UniqueName: \"kubernetes.io/projected/b86e98a3-9c3b-4d66-b71c-535882b764d3-kube-api-access-jx9rh\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652301 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-client-ca\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652314 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652333 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lfn8\" (UniqueName: \"kubernetes.io/projected/d4f92806-fad5-406a-92ba-e668b4e9cede-kube-api-access-7lfn8\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652362 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8c94\" (UniqueName: \"kubernetes.io/projected/5211fa59-5683-47eb-bfa9-100d466cd1d5-kube-api-access-q8c94\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652381 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/aa0c5241-c63d-4e8d-9001-3cf220a3182e-machine-approver-tls\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652401 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x62lh\" (UniqueName: \"kubernetes.io/projected/aa0c5241-c63d-4e8d-9001-3cf220a3182e-kube-api-access-x62lh\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652420 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-dir\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652452 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 
00:11:28.652471 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-service-ca\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652485 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-client\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652500 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652518 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652532 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qtmn\" (UniqueName: \"kubernetes.io/projected/60c02e8f-1cb7-402b-a065-2f3b207cf60b-kube-api-access-5qtmn\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652547 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/62227ec8-217f-461a-8116-079b9466a726-serviceca\") pod \"image-pruner-29319840-n77h7\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652561 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kll62\" (UniqueName: \"kubernetes.io/projected/62227ec8-217f-461a-8116-079b9466a726-kube-api-access-kll62\") pod \"image-pruner-29319840-n77h7\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.652575 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b86e98a3-9c3b-4d66-b71c-535882b764d3-serving-cert\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.675293 4809 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.677245 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.677798 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.678236 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.678511 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.678631 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.678886 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.678997 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.679170 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.679303 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.679442 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.679553 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.679683 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.680000 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.680113 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.680210 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.680304 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.680438 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.684403 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.684786 4809 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager"/"serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.685981 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.686166 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.686598 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.699588 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.699807 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.699840 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.700054 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.700148 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.700154 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.700272 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.700362 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707505 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707700 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707749 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707820 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707856 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707904 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.707940 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.708180 4809 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.706985 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.706206 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.712348 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.712425 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.725554 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726390 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726634 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726737 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726792 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726806 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726900 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726935 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726942 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.726977 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727058 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727099 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727194 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727213 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727395 4809 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-etcd-operator"/"etcd-client" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727524 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727591 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727603 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727715 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727848 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727932 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.727955 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.728017 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.728173 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.728211 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.728538 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.728800 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.729040 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.729321 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.729876 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q89hc"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.730544 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.730683 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.730751 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.731247 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.731469 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.732189 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.732363 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.732488 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.732994 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.734249 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.735076 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.737257 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.737389 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.737630 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.737776 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.738740 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-bc4wl"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.739517 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.739632 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.739995 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.744089 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.746673 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.746834 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.746871 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.747207 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.750081 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.750627 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.751749 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753077 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh672\" (UniqueName: \"kubernetes.io/projected/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-kube-api-access-gh672\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753118 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-audit\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753142 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-etcd-client\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753162 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753185 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-config\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753210 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-images\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753234 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-serving-cert\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753258 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/60c02e8f-1cb7-402b-a065-2f3b207cf60b-node-pullsecrets\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 
crc kubenswrapper[4809]: I0930 00:11:28.753282 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-oauth-serving-cert\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753304 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753327 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5211fa59-5683-47eb-bfa9-100d466cd1d5-serving-cert\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753349 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-metrics-certs\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753371 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-etcd-client\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753390 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-config\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753409 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-encryption-config\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753430 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5c458d1d-1777-46d0-a001-ebb0cae0e16f-audit-dir\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753450 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5gw9\" (UniqueName: \"kubernetes.io/projected/5c458d1d-1777-46d0-a001-ebb0cae0e16f-kube-api-access-h5gw9\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: 
\"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753471 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh6fw\" (UniqueName: \"kubernetes.io/projected/ca4b56e9-c63b-41a3-8182-90019963009f-kube-api-access-zh6fw\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753497 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5xzf\" (UniqueName: \"kubernetes.io/projected/6977dfac-797f-4284-ab89-48d3f92f332d-kube-api-access-v5xzf\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753523 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f96w4\" (UniqueName: \"kubernetes.io/projected/4edad23e-9bba-4744-904e-a4960e17ad69-kube-api-access-f96w4\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753546 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-client-ca\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753568 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/164d8b36-2115-4fd0-8404-9aec777ad0fa-metrics-tls\") pod \"dns-operator-744455d44c-4hg76\" (UID: \"164d8b36-2115-4fd0-8404-9aec777ad0fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753587 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-stats-auth\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753608 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-config\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753628 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4f92806-fad5-406a-92ba-e668b4e9cede-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753667 
4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-config\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753875 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753898 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8339f60-b243-4c2e-b416-86ef26c104a0-serving-cert\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753917 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4b56e9-c63b-41a3-8182-90019963009f-config\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753938 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753959 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbblc\" (UniqueName: \"kubernetes.io/projected/164d8b36-2115-4fd0-8404-9aec777ad0fa-kube-api-access-rbblc\") pod \"dns-operator-744455d44c-4hg76\" (UID: \"164d8b36-2115-4fd0-8404-9aec777ad0fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753981 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-default-certificate\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.753999 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754015 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/ca4b56e9-c63b-41a3-8182-90019963009f-images\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754032 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6977dfac-797f-4284-ab89-48d3f92f332d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754049 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqrzm\" (UniqueName: \"kubernetes.io/projected/88723807-e4e6-48ce-9d84-a66a57863496-kube-api-access-vqrzm\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754065 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754082 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aa0c5241-c63d-4e8d-9001-3cf220a3182e-auth-proxy-config\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754098 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-policies\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754114 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754130 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-service-ca-bundle\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754148 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/ca4b56e9-c63b-41a3-8182-90019963009f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754164 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trvjb\" (UniqueName: \"kubernetes.io/projected/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-kube-api-access-trvjb\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754182 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-audit-policies\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754197 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6977dfac-797f-4284-ab89-48d3f92f332d-serving-cert\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754211 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88723807-e4e6-48ce-9d84-a66a57863496-service-ca-bundle\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754227 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgh4n\" (UniqueName: \"kubernetes.io/projected/d8339f60-b243-4c2e-b416-86ef26c104a0-kube-api-access-kgh4n\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754241 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/60c02e8f-1cb7-402b-a065-2f3b207cf60b-audit-dir\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754256 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754273 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-oauth-config\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " 
pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754292 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hsgs\" (UniqueName: \"kubernetes.io/projected/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-kube-api-access-7hsgs\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754311 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4edad23e-9bba-4744-904e-a4960e17ad69-serving-cert\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754330 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-serving-cert\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754360 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754380 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-ca\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754400 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-config\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754416 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-encryption-config\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754432 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-service-ca\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754454 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-serving-cert\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: 
\"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754470 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-etcd-serving-ca\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754486 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-image-import-ca\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754502 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754520 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-trusted-ca-bundle\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754537 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754554 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa0c5241-c63d-4e8d-9001-3cf220a3182e-config\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754572 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx9rh\" (UniqueName: \"kubernetes.io/projected/b86e98a3-9c3b-4d66-b71c-535882b764d3-kube-api-access-jx9rh\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754588 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754604 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lfn8\" (UniqueName: 
\"kubernetes.io/projected/d4f92806-fad5-406a-92ba-e668b4e9cede-kube-api-access-7lfn8\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754627 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-client-ca\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754667 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8c94\" (UniqueName: \"kubernetes.io/projected/5211fa59-5683-47eb-bfa9-100d466cd1d5-kube-api-access-q8c94\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754683 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/aa0c5241-c63d-4e8d-9001-3cf220a3182e-machine-approver-tls\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754698 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x62lh\" (UniqueName: \"kubernetes.io/projected/aa0c5241-c63d-4e8d-9001-3cf220a3182e-kube-api-access-x62lh\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754713 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-dir\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754732 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-service-ca\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754748 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754764 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-proxy-tls\") pod 
\"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754781 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-client\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754796 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754810 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qtmn\" (UniqueName: \"kubernetes.io/projected/60c02e8f-1cb7-402b-a065-2f3b207cf60b-kube-api-access-5qtmn\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754826 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/62227ec8-217f-461a-8116-079b9466a726-serviceca\") pod \"image-pruner-29319840-n77h7\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754840 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kll62\" (UniqueName: \"kubernetes.io/projected/62227ec8-217f-461a-8116-079b9466a726-kube-api-access-kll62\") pod \"image-pruner-29319840-n77h7\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754857 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754872 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b86e98a3-9c3b-4d66-b71c-535882b764d3-serving-cert\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754895 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754912 
4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754927 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4f92806-fad5-406a-92ba-e668b4e9cede-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.754942 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-config\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.755633 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-config\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.758500 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.777981 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.778238 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.779387 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-config\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.779486 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-stats-auth\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.779697 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-metrics-certs\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.780330 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-config\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.781157 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-audit\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.781257 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/60c02e8f-1cb7-402b-a065-2f3b207cf60b-node-pullsecrets\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.783321 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6977dfac-797f-4284-ab89-48d3f92f332d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.783889 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.784122 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-etcd-client\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: 
\"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.784909 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-config\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.785339 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-policies\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.785404 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-oauth-serving-cert\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.785690 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.786068 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aa0c5241-c63d-4e8d-9001-3cf220a3182e-auth-proxy-config\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.786105 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.786327 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5211fa59-5683-47eb-bfa9-100d466cd1d5-serving-cert\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.786407 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.786656 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.786910 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-service-ca\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.787398 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca4b56e9-c63b-41a3-8182-90019963009f-images\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.787878 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca4b56e9-c63b-41a3-8182-90019963009f-config\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.788470 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88723807-e4e6-48ce-9d84-a66a57863496-service-ca-bundle\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.789351 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/62227ec8-217f-461a-8116-079b9466a726-serviceca\") pod \"image-pruner-29319840-n77h7\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.789914 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.790985 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-encryption-config\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.792066 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-service-ca-bundle\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.792134 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-config\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.792250 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-ca\") pod \"etcd-operator-b45778765-9sszs\" (UID: 
\"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.794277 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.792594 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5c458d1d-1777-46d0-a001-ebb0cae0e16f-audit-dir\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.793289 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/60c02e8f-1cb7-402b-a065-2f3b207cf60b-audit-dir\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.793494 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa0c5241-c63d-4e8d-9001-3cf220a3182e-config\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.793235 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-dir\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.793790 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.792360 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-config\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.794923 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-etcd-serving-ca\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.795082 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ca4b56e9-c63b-41a3-8182-90019963009f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: 
\"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.794620 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-etcd-client\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.795341 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-encryption-config\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.796262 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60c02e8f-1cb7-402b-a065-2f3b207cf60b-serving-cert\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.798318 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8339f60-b243-4c2e-b416-86ef26c104a0-serving-cert\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.799196 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.800288 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.800372 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4edad23e-9bba-4744-904e-a4960e17ad69-serving-cert\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.800623 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-serving-cert\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.801150 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.801851 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4f92806-fad5-406a-92ba-e668b4e9cede-config\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.802075 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.802585 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-client-ca\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.802595 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-trusted-ca-bundle\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.802724 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.802761 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5c458d1d-1777-46d0-a001-ebb0cae0e16f-audit-policies\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.803103 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-clhm9"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.803552 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.804033 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b86e98a3-9c3b-4d66-b71c-535882b764d3-etcd-client\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.804273 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.804927 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.805700 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-client-ca\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.806032 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-service-ca\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.807261 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.807419 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/164d8b36-2115-4fd0-8404-9aec777ad0fa-metrics-tls\") pod \"dns-operator-744455d44c-4hg76\" (UID: \"164d8b36-2115-4fd0-8404-9aec777ad0fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.807673 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.807919 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.808092 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.808486 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4edad23e-9bba-4744-904e-a4960e17ad69-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.808551 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.808695 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.808831 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-oauth-config\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.809037 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/60c02e8f-1cb7-402b-a065-2f3b207cf60b-image-import-ca\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.809125 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/aa0c5241-c63d-4e8d-9001-3cf220a3182e-machine-approver-tls\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.809318 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/88723807-e4e6-48ce-9d84-a66a57863496-default-certificate\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.809485 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.809682 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c458d1d-1777-46d0-a001-ebb0cae0e16f-serving-cert\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.809714 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.810823 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.811285 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.812715 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.812920 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4f92806-fad5-406a-92ba-e668b4e9cede-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.813605 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.813655 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.815462 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6977dfac-797f-4284-ab89-48d3f92f332d-serving-cert\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.815719 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.815728 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b86e98a3-9c3b-4d66-b71c-535882b764d3-serving-cert\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.816278 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-962sz"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.818381 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.820325 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-gjq9t"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.822020 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-9j5tx"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.822902 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.826950 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.830665 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.831381 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.835618 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.837023 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.842947 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-62g6c"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.843155 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.843289 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zlcqw"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.843095 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.843371 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-d9lrc"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.843495 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9sszs"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.843598 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.844105 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.844959 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgzpz"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.847492 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kdnqq"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.848287 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.848551 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lzb22"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.849047 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.849993 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.850562 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.852421 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.852907 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.853740 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855176 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855555 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-proxy-tls\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855583 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgt6q\" (UniqueName: \"kubernetes.io/projected/c05dd83a-3728-4184-aad5-666bd79967a5-kube-api-access-sgt6q\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855610 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tntf4\" (UniqueName: \"kubernetes.io/projected/e1fec829-b315-4468-9181-f36c2ee983b6-kube-api-access-tntf4\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855680 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d857620-6a88-49db-86a6-0f2cf373d0e8-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855712 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trvjb\" (UniqueName: \"kubernetes.io/projected/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-kube-api-access-trvjb\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855734 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ff30ff-b242-4857-9927-d8ab7620dd46-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855757 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4hg76"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855765 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a2031b74-3b79-422f-86c0-f4ddad880624-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-f9r9k\" (UID: \"a2031b74-3b79-422f-86c0-f4ddad880624\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855823 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgtmt\" (UniqueName: \"kubernetes.io/projected/64add8f2-553b-45fe-84d6-6123c46532c3-kube-api-access-bgtmt\") pod \"downloads-7954f5f757-bc4wl\" (UID: \"64add8f2-553b-45fe-84d6-6123c46532c3\") " pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855897 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c05dd83a-3728-4184-aad5-666bd79967a5-config\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856022 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blfkh\" (UniqueName: \"kubernetes.io/projected/81d9a2da-ae5c-437c-9460-eef4019785df-kube-api-access-blfkh\") pod \"cluster-samples-operator-665b6dd947-qmgd4\" (UID: \"81d9a2da-ae5c-437c-9460-eef4019785df\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.855951 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856055 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e3be393-d661-43f7-ba25-80e0e84b8b72-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856117 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4d857620-6a88-49db-86a6-0f2cf373d0e8-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856153 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-proxy-tls\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856175 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856192 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e942a3a-1bca-4fc8-865c-37899588219b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856217 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65ff30ff-b242-4857-9927-d8ab7620dd46-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856235 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0e64f9-08d4-4599-8731-cb62b636de47-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856275 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/81d9a2da-ae5c-437c-9460-eef4019785df-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-qmgd4\" (UID: \"81d9a2da-ae5c-437c-9460-eef4019785df\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856292 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e942a3a-1bca-4fc8-865c-37899588219b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856308 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856324 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-metrics-tls\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856339 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrql6\" (UniqueName: \"kubernetes.io/projected/a2031b74-3b79-422f-86c0-f4ddad880624-kube-api-access-wrql6\") pod \"control-plane-machine-set-operator-78cbb6b69f-f9r9k\" (UID: \"a2031b74-3b79-422f-86c0-f4ddad880624\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856355 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bc4wl"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856366 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e3be393-d661-43f7-ba25-80e0e84b8b72-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856386 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e3be393-d661-43f7-ba25-80e0e84b8b72-config\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856401 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpmrk\" (UniqueName: \"kubernetes.io/projected/edff5a66-7ade-44f3-a770-62a25d56e674-kube-api-access-wpmrk\") pod \"marketplace-operator-79b997595-clhm9\" (UID: 
\"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856750 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh77q\" (UniqueName: \"kubernetes.io/projected/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-kube-api-access-zh77q\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856805 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c05dd83a-3728-4184-aad5-666bd79967a5-trusted-ca\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.856946 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857165 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qngb4\" (UniqueName: \"kubernetes.io/projected/4d857620-6a88-49db-86a6-0f2cf373d0e8-kube-api-access-qngb4\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857237 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89mw8\" (UniqueName: \"kubernetes.io/projected/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-kube-api-access-89mw8\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857275 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fec829-b315-4468-9181-f36c2ee983b6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857305 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f0e64f9-08d4-4599-8731-cb62b636de47-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857333 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/4d857620-6a88-49db-86a6-0f2cf373d0e8-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857443 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857535 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857840 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.857942 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858025 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-trusted-ca\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858113 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1fec829-b315-4468-9181-f36c2ee983b6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858203 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtxn9\" (UniqueName: \"kubernetes.io/projected/5f0e64f9-08d4-4599-8731-cb62b636de47-kube-api-access-jtxn9\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858309 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e942a3a-1bca-4fc8-865c-37899588219b-config\") pod 
\"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858400 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-images\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858482 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb7gr\" (UniqueName: \"kubernetes.io/projected/a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb-kube-api-access-bb7gr\") pod \"migrator-59844c95c7-sz2pd\" (UID: \"a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858546 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-proxy-tls\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858555 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858601 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ff30ff-b242-4857-9927-d8ab7620dd46-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858632 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c05dd83a-3728-4184-aad5-666bd79967a5-serving-cert\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858674 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvb9s\" (UniqueName: \"kubernetes.io/projected/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-kube-api-access-pvb9s\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858754 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-wkqff"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.858877 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-images\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.859503 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.859973 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.861562 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.863114 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.864883 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29319840-n77h7"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.865882 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-9j5tx"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.867115 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.868274 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q89hc"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.869536 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.870919 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.872125 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.872859 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.873452 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.874795 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.884660 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.886120 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.887267 4809 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-clhm9"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.888788 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.889754 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.890744 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.891814 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.892827 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.894010 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.894213 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-2f5wv"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.894975 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.895827 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kdnqq"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.897310 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lzb22"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.898919 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-wkqff"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.900324 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.901428 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.902410 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-cz7bk"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.903243 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.903366 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cz7bk"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.919614 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.933528 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.936025 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7ct6f"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.938433 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.942311 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7ct6f"] Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.953565 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959348 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c05dd83a-3728-4184-aad5-666bd79967a5-serving-cert\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959389 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvb9s\" (UniqueName: \"kubernetes.io/projected/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-kube-api-access-pvb9s\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959419 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-proxy-tls\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959448 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5lrv\" (UniqueName: \"kubernetes.io/projected/ef8fb279-c7ac-480d-a945-d498ec0f523a-kube-api-access-g5lrv\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959481 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tntf4\" (UniqueName: \"kubernetes.io/projected/e1fec829-b315-4468-9181-f36c2ee983b6-kube-api-access-tntf4\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959505 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgt6q\" (UniqueName: \"kubernetes.io/projected/c05dd83a-3728-4184-aad5-666bd79967a5-kube-api-access-sgt6q\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959532 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d857620-6a88-49db-86a6-0f2cf373d0e8-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959564 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ff30ff-b242-4857-9927-d8ab7620dd46-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959600 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a2031b74-3b79-422f-86c0-f4ddad880624-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-f9r9k\" (UID: \"a2031b74-3b79-422f-86c0-f4ddad880624\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959631 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgtmt\" (UniqueName: \"kubernetes.io/projected/64add8f2-553b-45fe-84d6-6123c46532c3-kube-api-access-bgtmt\") pod \"downloads-7954f5f757-bc4wl\" (UID: \"64add8f2-553b-45fe-84d6-6123c46532c3\") " pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959689 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c05dd83a-3728-4184-aad5-666bd79967a5-config\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959717 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5n9k\" (UniqueName: \"kubernetes.io/projected/e8ab8a4b-c99d-4063-8791-33eecc55996e-kube-api-access-s5n9k\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959753 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blfkh\" (UniqueName: \"kubernetes.io/projected/81d9a2da-ae5c-437c-9460-eef4019785df-kube-api-access-blfkh\") pod \"cluster-samples-operator-665b6dd947-qmgd4\" (UID: \"81d9a2da-ae5c-437c-9460-eef4019785df\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959777 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e3be393-d661-43f7-ba25-80e0e84b8b72-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959853 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4d857620-6a88-49db-86a6-0f2cf373d0e8-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959880 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959900 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e942a3a-1bca-4fc8-865c-37899588219b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959927 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65ff30ff-b242-4857-9927-d8ab7620dd46-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959945 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0e64f9-08d4-4599-8731-cb62b636de47-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959971 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-key\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.959991 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/81d9a2da-ae5c-437c-9460-eef4019785df-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-qmgd4\" (UID: \"81d9a2da-ae5c-437c-9460-eef4019785df\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960013 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e942a3a-1bca-4fc8-865c-37899588219b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960033 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960056 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-metrics-tls\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960075 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrql6\" (UniqueName: \"kubernetes.io/projected/a2031b74-3b79-422f-86c0-f4ddad880624-kube-api-access-wrql6\") pod \"control-plane-machine-set-operator-78cbb6b69f-f9r9k\" (UID: \"a2031b74-3b79-422f-86c0-f4ddad880624\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960094 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e3be393-d661-43f7-ba25-80e0e84b8b72-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960118 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e3be393-d661-43f7-ba25-80e0e84b8b72-config\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960137 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpmrk\" (UniqueName: \"kubernetes.io/projected/edff5a66-7ade-44f3-a770-62a25d56e674-kube-api-access-wpmrk\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960169 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh77q\" (UniqueName: \"kubernetes.io/projected/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-kube-api-access-zh77q\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960188 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/c05dd83a-3728-4184-aad5-666bd79967a5-trusted-ca\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960208 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8ab8a4b-c99d-4063-8791-33eecc55996e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960270 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qngb4\" (UniqueName: \"kubernetes.io/projected/4d857620-6a88-49db-86a6-0f2cf373d0e8-kube-api-access-qngb4\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960300 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89mw8\" (UniqueName: \"kubernetes.io/projected/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-kube-api-access-89mw8\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960325 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fec829-b315-4468-9181-f36c2ee983b6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960343 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f0e64f9-08d4-4599-8731-cb62b636de47-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960391 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4d857620-6a88-49db-86a6-0f2cf373d0e8-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960399 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c05dd83a-3728-4184-aad5-666bd79967a5-config\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960411 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960451 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960580 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d857620-6a88-49db-86a6-0f2cf373d0e8-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960704 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-trusted-ca\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960749 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1fec829-b315-4468-9181-f36c2ee983b6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960780 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtxn9\" (UniqueName: \"kubernetes.io/projected/5f0e64f9-08d4-4599-8731-cb62b636de47-kube-api-access-jtxn9\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960810 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-cabundle\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960851 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e942a3a-1bca-4fc8-865c-37899588219b-config\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960899 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-bb7gr\" (UniqueName: \"kubernetes.io/projected/a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb-kube-api-access-bb7gr\") pod \"migrator-59844c95c7-sz2pd\" (UID: \"a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960925 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.960947 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ff30ff-b242-4857-9927-d8ab7620dd46-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.961240 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c05dd83a-3728-4184-aad5-666bd79967a5-trusted-ca\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.961357 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.963175 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4d857620-6a88-49db-86a6-0f2cf373d0e8-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.963256 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c05dd83a-3728-4184-aad5-666bd79967a5-serving-cert\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.973634 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 30 00:11:28 crc kubenswrapper[4809]: I0930 00:11:28.993882 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.013167 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.024550 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/81d9a2da-ae5c-437c-9460-eef4019785df-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-qmgd4\" (UID: \"81d9a2da-ae5c-437c-9460-eef4019785df\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.041276 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.053098 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.062252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-cabundle\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.062385 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5lrv\" (UniqueName: \"kubernetes.io/projected/ef8fb279-c7ac-480d-a945-d498ec0f523a-kube-api-access-g5lrv\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.062532 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5n9k\" (UniqueName: \"kubernetes.io/projected/e8ab8a4b-c99d-4063-8791-33eecc55996e-kube-api-access-s5n9k\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.062665 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-key\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.062777 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8ab8a4b-c99d-4063-8791-33eecc55996e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.072552 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.086300 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1fec829-b315-4468-9181-f36c2ee983b6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:29 
crc kubenswrapper[4809]: I0930 00:11:29.093521 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.113079 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.122145 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1fec829-b315-4468-9181-f36c2ee983b6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.134242 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.158711 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.165561 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-proxy-tls\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.174124 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.193391 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.234157 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.253448 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.264921 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-metrics-tls\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.284393 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.292326 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-trusted-ca\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.293085 4809 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.312890 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.333168 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.344395 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a2031b74-3b79-422f-86c0-f4ddad880624-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-f9r9k\" (UID: \"a2031b74-3b79-422f-86c0-f4ddad880624\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.354253 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.374354 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.381891 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e3be393-d661-43f7-ba25-80e0e84b8b72-config\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.394508 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.414285 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.426579 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e3be393-d661-43f7-ba25-80e0e84b8b72-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.433359 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.474923 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh672\" (UniqueName: \"kubernetes.io/projected/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-kube-api-access-gh672\") pod \"oauth-openshift-558db77b4-d9lrc\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.502474 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgh4n\" (UniqueName: \"kubernetes.io/projected/d8339f60-b243-4c2e-b416-86ef26c104a0-kube-api-access-kgh4n\") pod \"controller-manager-879f6c89f-lgzpz\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.512679 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqrzm\" (UniqueName: \"kubernetes.io/projected/88723807-e4e6-48ce-9d84-a66a57863496-kube-api-access-vqrzm\") pod \"router-default-5444994796-5gnw9\" (UID: \"88723807-e4e6-48ce-9d84-a66a57863496\") " pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.545382 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qtmn\" (UniqueName: \"kubernetes.io/projected/60c02e8f-1cb7-402b-a065-2f3b207cf60b-kube-api-access-5qtmn\") pod \"apiserver-76f77b778f-962sz\" (UID: \"60c02e8f-1cb7-402b-a065-2f3b207cf60b\") " pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.553891 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kll62\" (UniqueName: \"kubernetes.io/projected/62227ec8-217f-461a-8116-079b9466a726-kube-api-access-kll62\") pod \"image-pruner-29319840-n77h7\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.562730 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.573898 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh6fw\" (UniqueName: \"kubernetes.io/projected/ca4b56e9-c63b-41a3-8182-90019963009f-kube-api-access-zh6fw\") pod \"machine-api-operator-5694c8668f-zlcqw\" (UID: \"ca4b56e9-c63b-41a3-8182-90019963009f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.580008 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.601787 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5xzf\" (UniqueName: \"kubernetes.io/projected/6977dfac-797f-4284-ab89-48d3f92f332d-kube-api-access-v5xzf\") pod \"openshift-config-operator-7777fb866f-rnr6q\" (UID: \"6977dfac-797f-4284-ab89-48d3f92f332d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.605398 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.617859 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hsgs\" (UniqueName: \"kubernetes.io/projected/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-kube-api-access-7hsgs\") pod \"console-f9d7485db-gjq9t\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.650406 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f96w4\" (UniqueName: \"kubernetes.io/projected/4edad23e-9bba-4744-904e-a4960e17ad69-kube-api-access-f96w4\") pod \"authentication-operator-69f744f599-62g6c\" (UID: \"4edad23e-9bba-4744-904e-a4960e17ad69\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.654828 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbblc\" (UniqueName: \"kubernetes.io/projected/164d8b36-2115-4fd0-8404-9aec777ad0fa-kube-api-access-rbblc\") pod \"dns-operator-744455d44c-4hg76\" (UID: \"164d8b36-2115-4fd0-8404-9aec777ad0fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.682227 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x62lh\" (UniqueName: \"kubernetes.io/projected/aa0c5241-c63d-4e8d-9001-3cf220a3182e-kube-api-access-x62lh\") pod \"machine-approver-56656f9798-jqgvc\" (UID: \"aa0c5241-c63d-4e8d-9001-3cf220a3182e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.693258 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5gw9\" (UniqueName: \"kubernetes.io/projected/5c458d1d-1777-46d0-a001-ebb0cae0e16f-kube-api-access-h5gw9\") pod \"apiserver-7bbb656c7d-jm5f4\" (UID: \"5c458d1d-1777-46d0-a001-ebb0cae0e16f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.715863 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8c94\" (UniqueName: \"kubernetes.io/projected/5211fa59-5683-47eb-bfa9-100d466cd1d5-kube-api-access-q8c94\") pod \"route-controller-manager-6576b87f9c-pzxnq\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.728592 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx9rh\" (UniqueName: \"kubernetes.io/projected/b86e98a3-9c3b-4d66-b71c-535882b764d3-kube-api-access-jx9rh\") pod \"etcd-operator-b45778765-9sszs\" (UID: \"b86e98a3-9c3b-4d66-b71c-535882b764d3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.739881 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.750308 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.753523 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.758072 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lfn8\" (UniqueName: \"kubernetes.io/projected/d4f92806-fad5-406a-92ba-e668b4e9cede-kube-api-access-7lfn8\") pod \"openshift-apiserver-operator-796bbdcf4f-m4bcx\" (UID: \"d4f92806-fad5-406a-92ba-e668b4e9cede\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.761983 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.767787 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.775133 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.778411 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.783542 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e942a3a-1bca-4fc8-865c-37899588219b-config\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.787085 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.796834 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.807778 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.808899 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e942a3a-1bca-4fc8-865c-37899588219b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.811530 4809 request.go:700] Waited for 1.010756039s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.813746 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.816514 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.827103 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgzpz"] Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.833750 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.837500 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.838845 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.846050 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29319840-n77h7"] Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.853237 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.874972 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.881717 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-d9lrc"] Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.893821 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 00:11:29 crc kubenswrapper[4809]: W0930 00:11:29.904211 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa0c5241_c63d_4e8d_9001_3cf220a3182e.slice/crio-cbee0e4ede22262c2cb859a1106a9aaa1bdb18bd4a684173c783202271591ba7 WatchSource:0}: Error finding container cbee0e4ede22262c2cb859a1106a9aaa1bdb18bd4a684173c783202271591ba7: Status 404 returned error can't find the container with id cbee0e4ede22262c2cb859a1106a9aaa1bdb18bd4a684173c783202271591ba7 Sep 30 00:11:29 crc kubenswrapper[4809]: W0930 00:11:29.908481 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62227ec8_217f_461a_8116_079b9466a726.slice/crio-8d1a0fa5c90930996c6707f5ec585e90ee21dad11e85104d30c4b9bf6dbf73c2 WatchSource:0}: Error finding container 8d1a0fa5c90930996c6707f5ec585e90ee21dad11e85104d30c4b9bf6dbf73c2: Status 404 returned error can't find the container with id 8d1a0fa5c90930996c6707f5ec585e90ee21dad11e85104d30c4b9bf6dbf73c2 Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.915232 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.928472 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ff30ff-b242-4857-9927-d8ab7620dd46-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.950094 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.952825 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.953187 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.958858 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.962019 4809 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.962474 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics podName:edff5a66-7ade-44f3-a770-62a25d56e674 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.462215027 +0000 UTC m=+141.498464435 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics") pod "marketplace-operator-79b997595-clhm9" (UID: "edff5a66-7ade-44f3-a770-62a25d56e674") : failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.963791 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ff30ff-b242-4857-9927-d8ab7620dd46-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.963886 4809 configmap.go:193] Couldn't get configMap openshift-kube-storage-version-migrator-operator/config: failed to sync configmap cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.963973 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5f0e64f9-08d4-4599-8731-cb62b636de47-config podName:5f0e64f9-08d4-4599-8731-cb62b636de47 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.46396325 +0000 UTC m=+141.500212658 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/5f0e64f9-08d4-4599-8731-cb62b636de47-config") pod "kube-storage-version-migrator-operator-b67b599dd-7zskd" (UID: "5f0e64f9-08d4-4599-8731-cb62b636de47") : failed to sync configmap cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.964026 4809 secret.go:188] Couldn't get secret openshift-kube-storage-version-migrator-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.964057 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f0e64f9-08d4-4599-8731-cb62b636de47-serving-cert podName:5f0e64f9-08d4-4599-8731-cb62b636de47 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.464051083 +0000 UTC m=+141.500300491 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/5f0e64f9-08d4-4599-8731-cb62b636de47-serving-cert") pod "kube-storage-version-migrator-operator-b67b599dd-7zskd" (UID: "5f0e64f9-08d4-4599-8731-cb62b636de47") : failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.964171 4809 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.964201 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-webhook-certs podName:137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.464194927 +0000 UTC m=+141.500444335 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-webhook-certs") pod "multus-admission-controller-857f4d67dd-9j5tx" (UID: "137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3") : failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.964542 4809 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: E0930 00:11:29.964605 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca podName:edff5a66-7ade-44f3-a770-62a25d56e674 nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.464576929 +0000 UTC m=+141.500826557 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca") pod "marketplace-operator-79b997595-clhm9" (UID: "edff5a66-7ade-44f3-a770-62a25d56e674") : failed to sync configmap cache: timed out waiting for the condition Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.965778 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-62g6c"] Sep 30 00:11:29 crc kubenswrapper[4809]: I0930 00:11:29.976306 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.001912 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.015132 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.036232 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.054183 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: E0930 00:11:30.067868 4809 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:30 crc kubenswrapper[4809]: E0930 00:11:30.067868 4809 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:30 crc kubenswrapper[4809]: E0930 00:11:30.067945 4809 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Sep 30 00:11:30 crc kubenswrapper[4809]: E0930 00:11:30.067963 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-key podName:ef8fb279-c7ac-480d-a945-d498ec0f523a nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.567939514 +0000 UTC m=+141.604188922 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-key") pod "service-ca-9c57cc56f-kdnqq" (UID: "ef8fb279-c7ac-480d-a945-d498ec0f523a") : failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:30 crc kubenswrapper[4809]: E0930 00:11:30.068037 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e8ab8a4b-c99d-4063-8791-33eecc55996e-package-server-manager-serving-cert podName:e8ab8a4b-c99d-4063-8791-33eecc55996e nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.568017426 +0000 UTC m=+141.604266834 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/e8ab8a4b-c99d-4063-8791-33eecc55996e-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-nkmqg" (UID: "e8ab8a4b-c99d-4063-8791-33eecc55996e") : failed to sync secret cache: timed out waiting for the condition Sep 30 00:11:30 crc kubenswrapper[4809]: E0930 00:11:30.068051 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-cabundle podName:ef8fb279-c7ac-480d-a945-d498ec0f523a nodeName:}" failed. No retries permitted until 2025-09-30 00:11:30.568043797 +0000 UTC m=+141.604293195 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-cabundle") pod "service-ca-9c57cc56f-kdnqq" (UID: "ef8fb279-c7ac-480d-a945-d498ec0f523a") : failed to sync configmap cache: timed out waiting for the condition Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.075181 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.096284 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.115319 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.133262 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.153099 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.172699 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.194737 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.213089 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.233717 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.251533 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zlcqw"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.253330 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.269523 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-gjq9t"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.277844 4809 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.278557 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4hg76"] Sep 30 00:11:30 crc kubenswrapper[4809]: W0930 00:11:30.289919 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod513c8a3a_0885_48a4_ad5d_7d1dab1fcb05.slice/crio-1ac4a505560e98a81743d982570aa46eb561e7d7bc7037d47161c6f4611f81cd WatchSource:0}: Error finding container 1ac4a505560e98a81743d982570aa46eb561e7d7bc7037d47161c6f4611f81cd: Status 404 returned error can't find the container with id 1ac4a505560e98a81743d982570aa46eb561e7d7bc7037d47161c6f4611f81cd Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.294607 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.302124 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx"] Sep 30 00:11:30 crc kubenswrapper[4809]: W0930 00:11:30.306124 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod164d8b36_2115_4fd0_8404_9aec777ad0fa.slice/crio-b4e043a72326db7974613eb2b9b88441c60be14aeab86434db02504fccbf4e7b WatchSource:0}: Error finding container b4e043a72326db7974613eb2b9b88441c60be14aeab86434db02504fccbf4e7b: Status 404 returned error can't find the container with id b4e043a72326db7974613eb2b9b88441c60be14aeab86434db02504fccbf4e7b Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.314438 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: W0930 00:11:30.320092 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4f92806_fad5_406a_92ba_e668b4e9cede.slice/crio-70a995babe09a7bd71ff01477931bcaa52ba19acfdacd399457fa2bc39a8bac9 WatchSource:0}: Error finding container 70a995babe09a7bd71ff01477931bcaa52ba19acfdacd399457fa2bc39a8bac9: Status 404 returned error can't find the container with id 70a995babe09a7bd71ff01477931bcaa52ba19acfdacd399457fa2bc39a8bac9 Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.333477 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.338376 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.354370 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.373944 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.386966 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9sszs"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.389102 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.394984 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: W0930 00:11:30.403583 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb86e98a3_9c3b_4d66_b71c_535882b764d3.slice/crio-8f03aeb780652ef270eec11d71f83ec26f33654db6f1275909dada90986c33f5 WatchSource:0}: Error finding container 8f03aeb780652ef270eec11d71f83ec26f33654db6f1275909dada90986c33f5: Status 404 returned error can't find the container with id 8f03aeb780652ef270eec11d71f83ec26f33654db6f1275909dada90986c33f5 Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.404547 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.413719 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: W0930 00:11:30.421227 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6977dfac_797f_4284_ab89_48d3f92f332d.slice/crio-242484faf4e7fe9f71473465755ea29328ae55ee101b1e5990907a160dca55ef WatchSource:0}: Error finding container 242484faf4e7fe9f71473465755ea29328ae55ee101b1e5990907a160dca55ef: Status 404 returned error can't find the container with id 242484faf4e7fe9f71473465755ea29328ae55ee101b1e5990907a160dca55ef Sep 30 00:11:30 crc kubenswrapper[4809]: W0930 00:11:30.422023 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5211fa59_5683_47eb_bfa9_100d466cd1d5.slice/crio-d513d62e273c636b752d0c090b68dad3434908eb3d3d7bfeeb50859a980783bf WatchSource:0}: Error finding container d513d62e273c636b752d0c090b68dad3434908eb3d3d7bfeeb50859a980783bf: Status 404 returned error can't find the container with id d513d62e273c636b752d0c090b68dad3434908eb3d3d7bfeeb50859a980783bf Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.433516 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.443112 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" event={"ID":"0cb3f89e-4f46-45ca-be4e-948f5010dcc1","Type":"ContainerStarted","Data":"d04a597c30950bcb52d6643516a5b537f3f62afd9ac99ffb4a99c68064bc2e37"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.443165 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" event={"ID":"0cb3f89e-4f46-45ca-be4e-948f5010dcc1","Type":"ContainerStarted","Data":"aa0d04cafc72161113ca3765643c8d3260fba99e7cc45ca4c0ed3866f475a9aa"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.444233 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.446498 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5gnw9" 
event={"ID":"88723807-e4e6-48ce-9d84-a66a57863496","Type":"ContainerStarted","Data":"515ea2e9412918d8c903141acdab3cfce72e7bfde82c4ec3e48334b0694ef40f"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.446531 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5gnw9" event={"ID":"88723807-e4e6-48ce-9d84-a66a57863496","Type":"ContainerStarted","Data":"4878245fdf2112116ab8d53e81ae269565627bc5aae86e6ed076bbb835291194"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.450163 4809 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-d9lrc container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body= Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.450212 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" podUID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.450664 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-962sz"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.451318 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gjq9t" event={"ID":"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05","Type":"ContainerStarted","Data":"1ac4a505560e98a81743d982570aa46eb561e7d7bc7037d47161c6f4611f81cd"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.452917 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.454202 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" event={"ID":"d8339f60-b243-4c2e-b416-86ef26c104a0","Type":"ContainerStarted","Data":"0472183cf47259f6240250057f152f1e3c6b34d9b78ccd9bebcc4a80d5c2c38a"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.454227 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" event={"ID":"d8339f60-b243-4c2e-b416-86ef26c104a0","Type":"ContainerStarted","Data":"2ab3164815907d3a147737f4d4534c6bfea471b452e3a440dd6f481c8647f392"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.454494 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.457538 4809 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lgzpz container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.457572 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 30 00:11:30 crc 
kubenswrapper[4809]: W0930 00:11:30.469553 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60c02e8f_1cb7_402b_a065_2f3b207cf60b.slice/crio-6f5e7a5a0547a56567af5bcfb0a2ac445c76d4e957ec4dbe9a93d68b5c735a80 WatchSource:0}: Error finding container 6f5e7a5a0547a56567af5bcfb0a2ac445c76d4e957ec4dbe9a93d68b5c735a80: Status 404 returned error can't find the container with id 6f5e7a5a0547a56567af5bcfb0a2ac445c76d4e957ec4dbe9a93d68b5c735a80 Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.469575 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" event={"ID":"ca4b56e9-c63b-41a3-8182-90019963009f","Type":"ContainerStarted","Data":"a1ab3e7dbf5bad1f5ae254568becb7cb7e7dc5aa32eab5c8a7319f2712160941"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.476193 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.476410 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" event={"ID":"d4f92806-fad5-406a-92ba-e668b4e9cede","Type":"ContainerStarted","Data":"70a995babe09a7bd71ff01477931bcaa52ba19acfdacd399457fa2bc39a8bac9"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.492707 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-n77h7" event={"ID":"62227ec8-217f-461a-8116-079b9466a726","Type":"ContainerStarted","Data":"8688133dbb6713a4c7e89013d741c1a2f66663972f6ae187033a4320b0caa80e"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.492742 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-n77h7" event={"ID":"62227ec8-217f-461a-8116-079b9466a726","Type":"ContainerStarted","Data":"8d1a0fa5c90930996c6707f5ec585e90ee21dad11e85104d30c4b9bf6dbf73c2"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.494066 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.494248 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0e64f9-08d4-4599-8731-cb62b636de47-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.494320 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.494612 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5f0e64f9-08d4-4599-8731-cb62b636de47-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.494796 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.503372 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.504163 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f0e64f9-08d4-4599-8731-cb62b636de47-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.504412 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.508981 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" event={"ID":"aa0c5241-c63d-4e8d-9001-3cf220a3182e","Type":"ContainerStarted","Data":"dc5ac58d340ab16192ddf73d8b6981cbcfc57302acc4a5086e174ed9595a636c"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.509038 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" event={"ID":"aa0c5241-c63d-4e8d-9001-3cf220a3182e","Type":"ContainerStarted","Data":"cbee0e4ede22262c2cb859a1106a9aaa1bdb18bd4a684173c783202271591ba7"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.512316 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.514553 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" event={"ID":"5c458d1d-1777-46d0-a001-ebb0cae0e16f","Type":"ContainerStarted","Data":"0effc7fc429ff5ac840359de51237236890c9cb58c7b456b157bf556d19bee40"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.515020 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.516039 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.522817 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0e64f9-08d4-4599-8731-cb62b636de47-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.529405 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" event={"ID":"b86e98a3-9c3b-4d66-b71c-535882b764d3","Type":"ContainerStarted","Data":"8f03aeb780652ef270eec11d71f83ec26f33654db6f1275909dada90986c33f5"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.532935 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" event={"ID":"6977dfac-797f-4284-ab89-48d3f92f332d","Type":"ContainerStarted","Data":"242484faf4e7fe9f71473465755ea29328ae55ee101b1e5990907a160dca55ef"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.533391 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.534311 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" event={"ID":"164d8b36-2115-4fd0-8404-9aec777ad0fa","Type":"ContainerStarted","Data":"b4e043a72326db7974613eb2b9b88441c60be14aeab86434db02504fccbf4e7b"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.537925 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" event={"ID":"4edad23e-9bba-4744-904e-a4960e17ad69","Type":"ContainerStarted","Data":"4d776fa5c73f2fac1012717c407b097ff5754ed2bff41865fe9a44fc1ddd6344"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.537983 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" event={"ID":"4edad23e-9bba-4744-904e-a4960e17ad69","Type":"ContainerStarted","Data":"27459142e5b811c85172faa38b0abe9155f1c2df65e5bb5e6f60e26075afc0a5"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.545216 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" event={"ID":"5211fa59-5683-47eb-bfa9-100d466cd1d5","Type":"ContainerStarted","Data":"d513d62e273c636b752d0c090b68dad3434908eb3d3d7bfeeb50859a980783bf"} Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.557090 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.577250 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.593758 4809 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.596240 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-cabundle\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.596444 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-key\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.596564 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8ab8a4b-c99d-4063-8791-33eecc55996e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.598751 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-cabundle\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.601945 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8ab8a4b-c99d-4063-8791-33eecc55996e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.602148 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ef8fb279-c7ac-480d-a945-d498ec0f523a-signing-key\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.615023 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.634201 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.669691 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trvjb\" (UniqueName: \"kubernetes.io/projected/7239c02e-65ba-4a23-bb2e-ba8a6d39a142-kube-api-access-trvjb\") pod \"machine-config-operator-74547568cd-bpgds\" (UID: \"7239c02e-65ba-4a23-bb2e-ba8a6d39a142\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.673415 4809 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.674981 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.694297 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.713264 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.733491 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.754171 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.762624 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.767995 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:30 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:30 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:30 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.768948 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.777604 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.794346 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.812281 4809 request.go:700] Waited for 1.916921386s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0 Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.815098 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.835186 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.853815 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.874905 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 
00:11:30.913624 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.924295 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds"] Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.934677 4809 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.954094 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 00:11:30 crc kubenswrapper[4809]: I0930 00:11:30.991523 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvb9s\" (UniqueName: \"kubernetes.io/projected/49e2a7e3-f8ad-4c5a-8616-fca0daa506df-kube-api-access-pvb9s\") pod \"machine-config-controller-84d6567774-r67k2\" (UID: \"49e2a7e3-f8ad-4c5a-8616-fca0daa506df\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.010525 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgt6q\" (UniqueName: \"kubernetes.io/projected/c05dd83a-3728-4184-aad5-666bd79967a5-kube-api-access-sgt6q\") pod \"console-operator-58897d9998-q89hc\" (UID: \"c05dd83a-3728-4184-aad5-666bd79967a5\") " pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.021933 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.029695 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tntf4\" (UniqueName: \"kubernetes.io/projected/e1fec829-b315-4468-9181-f36c2ee983b6-kube-api-access-tntf4\") pod \"openshift-controller-manager-operator-756b6f6bc6-4cfbv\" (UID: \"e1fec829-b315-4468-9181-f36c2ee983b6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.048855 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgtmt\" (UniqueName: \"kubernetes.io/projected/64add8f2-553b-45fe-84d6-6123c46532c3-kube-api-access-bgtmt\") pod \"downloads-7954f5f757-bc4wl\" (UID: \"64add8f2-553b-45fe-84d6-6123c46532c3\") " pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.068494 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blfkh\" (UniqueName: \"kubernetes.io/projected/81d9a2da-ae5c-437c-9460-eef4019785df-kube-api-access-blfkh\") pod \"cluster-samples-operator-665b6dd947-qmgd4\" (UID: \"81d9a2da-ae5c-437c-9460-eef4019785df\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.089125 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4d857620-6a88-49db-86a6-0f2cf373d0e8-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" 
Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.127395 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65ff30ff-b242-4857-9927-d8ab7620dd46-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6r6lv\" (UID: \"65ff30ff-b242-4857-9927-d8ab7620dd46\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.139532 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrql6\" (UniqueName: \"kubernetes.io/projected/a2031b74-3b79-422f-86c0-f4ddad880624-kube-api-access-wrql6\") pod \"control-plane-machine-set-operator-78cbb6b69f-f9r9k\" (UID: \"a2031b74-3b79-422f-86c0-f4ddad880624\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.155249 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e942a3a-1bca-4fc8-865c-37899588219b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zz87f\" (UID: \"0e942a3a-1bca-4fc8-865c-37899588219b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.170709 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qngb4\" (UniqueName: \"kubernetes.io/projected/4d857620-6a88-49db-86a6-0f2cf373d0e8-kube-api-access-qngb4\") pod \"cluster-image-registry-operator-dc59b4c8b-9kv9s\" (UID: \"4d857620-6a88-49db-86a6-0f2cf373d0e8\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.189385 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e3be393-d661-43f7-ba25-80e0e84b8b72-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5t4kq\" (UID: \"4e3be393-d661-43f7-ba25-80e0e84b8b72\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.208381 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpmrk\" (UniqueName: \"kubernetes.io/projected/edff5a66-7ade-44f3-a770-62a25d56e674-kube-api-access-wpmrk\") pod \"marketplace-operator-79b997595-clhm9\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.224509 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2"] Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.231494 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89mw8\" (UniqueName: \"kubernetes.io/projected/137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3-kube-api-access-89mw8\") pod \"multus-admission-controller-857f4d67dd-9j5tx\" (UID: \"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:31 crc kubenswrapper[4809]: W0930 00:11:31.234776 4809 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49e2a7e3_f8ad_4c5a_8616_fca0daa506df.slice/crio-cc4eac297754e7b5abb54d79e616969cc7523e09632d96f3637457e848746f13 WatchSource:0}: Error finding container cc4eac297754e7b5abb54d79e616969cc7523e09632d96f3637457e848746f13: Status 404 returned error can't find the container with id cc4eac297754e7b5abb54d79e616969cc7523e09632d96f3637457e848746f13 Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.262457 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh77q\" (UniqueName: \"kubernetes.io/projected/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-kube-api-access-zh77q\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.272736 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtxn9\" (UniqueName: \"kubernetes.io/projected/5f0e64f9-08d4-4599-8731-cb62b636de47-kube-api-access-jtxn9\") pod \"kube-storage-version-migrator-operator-b67b599dd-7zskd\" (UID: \"5f0e64f9-08d4-4599-8731-cb62b636de47\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.285108 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.291882 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.294995 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb7gr\" (UniqueName: \"kubernetes.io/projected/a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb-kube-api-access-bb7gr\") pod \"migrator-59844c95c7-sz2pd\" (UID: \"a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.301544 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.307267 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/11bd97c9-d0d7-4758-b3af-3386ebf8bb69-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cbvmm\" (UID: \"11bd97c9-d0d7-4758-b3af-3386ebf8bb69\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.309755 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.332991 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5lrv\" (UniqueName: \"kubernetes.io/projected/ef8fb279-c7ac-480d-a945-d498ec0f523a-kube-api-access-g5lrv\") pod \"service-ca-9c57cc56f-kdnqq\" (UID: \"ef8fb279-c7ac-480d-a945-d498ec0f523a\") " pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.338011 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.345300 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.355969 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.365961 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5n9k\" (UniqueName: \"kubernetes.io/projected/e8ab8a4b-c99d-4063-8791-33eecc55996e-kube-api-access-s5n9k\") pod \"package-server-manager-789f6589d5-nkmqg\" (UID: \"e8ab8a4b-c99d-4063-8791-33eecc55996e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.366069 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.373208 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.381899 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.401304 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.401862 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.407924 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.408679 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/df51fff3-849a-4fa7-976d-e54f548af4b7-apiservice-cert\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.408739 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/df51fff3-849a-4fa7-976d-e54f548af4b7-webhook-cert\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.408779 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2g9s\" (UniqueName: \"kubernetes.io/projected/960a6573-c6e4-49fe-9aff-0c9b43435215-kube-api-access-l2g9s\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.408815 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a7429ac3-1745-4020-8463-6dd90982a5f3-metrics-tls\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.408858 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b1ad66e1-e4b8-416f-b7e8-2b965c866123-srv-cert\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.408995 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/167b672c-2aed-451c-8117-e723c8cd10b5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409037 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-tls\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409129 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7429ac3-1745-4020-8463-6dd90982a5f3-config-volume\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc 
kubenswrapper[4809]: I0930 00:11:31.409165 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/36e6e320-57c7-44aa-ab53-4cdea9183752-node-bootstrap-token\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409195 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409248 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84b2b582-7e6f-491e-8164-beb11b7ee33c-cert\") pod \"ingress-canary-wkqff\" (UID: \"84b2b582-7e6f-491e-8164-beb11b7ee33c\") " pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409288 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/00290c40-68ba-4728-b5fe-3ff985476bcf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409338 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-certificates\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409360 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-794c8\" (UniqueName: \"kubernetes.io/projected/a7429ac3-1745-4020-8463-6dd90982a5f3-kube-api-access-794c8\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409526 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmmrc\" (UniqueName: \"kubernetes.io/projected/167b672c-2aed-451c-8117-e723c8cd10b5-kube-api-access-fmmrc\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409562 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/960a6573-c6e4-49fe-9aff-0c9b43435215-secret-volume\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409625 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm65p\" (UniqueName: \"kubernetes.io/projected/b1ad66e1-e4b8-416f-b7e8-2b965c866123-kube-api-access-qm65p\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409730 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdkgb\" (UniqueName: \"kubernetes.io/projected/84b2b582-7e6f-491e-8164-beb11b7ee33c-kube-api-access-fdkgb\") pod \"ingress-canary-wkqff\" (UID: \"84b2b582-7e6f-491e-8164-beb11b7ee33c\") " pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409775 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/36e6e320-57c7-44aa-ab53-4cdea9183752-certs\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409881 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-serving-cert\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409905 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqxlc\" (UniqueName: \"kubernetes.io/projected/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-kube-api-access-kqxlc\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409951 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5dc9\" (UniqueName: \"kubernetes.io/projected/df51fff3-849a-4fa7-976d-e54f548af4b7-kube-api-access-d5dc9\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.409974 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b1ad66e1-e4b8-416f-b7e8-2b965c866123-profile-collector-cert\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.410022 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp6bf\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-kube-api-access-vp6bf\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.410105 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrdtc\" (UniqueName: \"kubernetes.io/projected/36e6e320-57c7-44aa-ab53-4cdea9183752-kube-api-access-nrdtc\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.410128 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/167b672c-2aed-451c-8117-e723c8cd10b5-srv-cert\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: E0930 00:11:31.412989 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:31.912974093 +0000 UTC m=+142.949223501 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.413057 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-bound-sa-token\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.419778 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-config\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.420789 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/00290c40-68ba-4728-b5fe-3ff985476bcf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.420818 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-trusted-ca\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.420838 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/960a6573-c6e4-49fe-9aff-0c9b43435215-config-volume\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.426465 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.427592 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/df51fff3-849a-4fa7-976d-e54f548af4b7-tmpfs\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.445073 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.462254 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529218 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529430 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84b2b582-7e6f-491e-8164-beb11b7ee33c-cert\") pod \"ingress-canary-wkqff\" (UID: \"84b2b582-7e6f-491e-8164-beb11b7ee33c\") " pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529473 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/00290c40-68ba-4728-b5fe-3ff985476bcf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529504 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-mountpoint-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-certificates\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529553 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-794c8\" (UniqueName: 
\"kubernetes.io/projected/a7429ac3-1745-4020-8463-6dd90982a5f3-kube-api-access-794c8\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529574 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-plugins-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529622 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmmrc\" (UniqueName: \"kubernetes.io/projected/167b672c-2aed-451c-8117-e723c8cd10b5-kube-api-access-fmmrc\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529670 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/960a6573-c6e4-49fe-9aff-0c9b43435215-secret-volume\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529696 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-registration-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529757 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm65p\" (UniqueName: \"kubernetes.io/projected/b1ad66e1-e4b8-416f-b7e8-2b965c866123-kube-api-access-qm65p\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529785 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdkgb\" (UniqueName: \"kubernetes.io/projected/84b2b582-7e6f-491e-8164-beb11b7ee33c-kube-api-access-fdkgb\") pod \"ingress-canary-wkqff\" (UID: \"84b2b582-7e6f-491e-8164-beb11b7ee33c\") " pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529799 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-socket-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529816 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/36e6e320-57c7-44aa-ab53-4cdea9183752-certs\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 
00:11:31.529835 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psws6\" (UniqueName: \"kubernetes.io/projected/413c828b-94ff-4866-a5ed-a533cf34a56a-kube-api-access-psws6\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529856 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-serving-cert\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529873 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqxlc\" (UniqueName: \"kubernetes.io/projected/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-kube-api-access-kqxlc\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529910 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5dc9\" (UniqueName: \"kubernetes.io/projected/df51fff3-849a-4fa7-976d-e54f548af4b7-kube-api-access-d5dc9\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529936 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b1ad66e1-e4b8-416f-b7e8-2b965c866123-profile-collector-cert\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.529964 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp6bf\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-kube-api-access-vp6bf\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530039 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrdtc\" (UniqueName: \"kubernetes.io/projected/36e6e320-57c7-44aa-ab53-4cdea9183752-kube-api-access-nrdtc\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530068 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/167b672c-2aed-451c-8117-e723c8cd10b5-srv-cert\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530134 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-bound-sa-token\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530193 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-config\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530219 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/00290c40-68ba-4728-b5fe-3ff985476bcf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530242 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-trusted-ca\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530263 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/960a6573-c6e4-49fe-9aff-0c9b43435215-config-volume\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530297 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/df51fff3-849a-4fa7-976d-e54f548af4b7-tmpfs\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530321 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/df51fff3-849a-4fa7-976d-e54f548af4b7-apiservice-cert\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530346 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/df51fff3-849a-4fa7-976d-e54f548af4b7-webhook-cert\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530372 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2g9s\" (UniqueName: \"kubernetes.io/projected/960a6573-c6e4-49fe-9aff-0c9b43435215-kube-api-access-l2g9s\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 
00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530394 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a7429ac3-1745-4020-8463-6dd90982a5f3-metrics-tls\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530427 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b1ad66e1-e4b8-416f-b7e8-2b965c866123-srv-cert\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530517 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-csi-data-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530543 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/167b672c-2aed-451c-8117-e723c8cd10b5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530569 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-tls\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530592 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7429ac3-1745-4020-8463-6dd90982a5f3-config-volume\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.530611 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/36e6e320-57c7-44aa-ab53-4cdea9183752-node-bootstrap-token\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: E0930 00:11:31.530893 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.030858898 +0000 UTC m=+143.067108486 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.539826 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/36e6e320-57c7-44aa-ab53-4cdea9183752-certs\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.540981 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-certificates\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.541499 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/df51fff3-849a-4fa7-976d-e54f548af4b7-tmpfs\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.541580 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/960a6573-c6e4-49fe-9aff-0c9b43435215-config-volume\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.542417 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/00290c40-68ba-4728-b5fe-3ff985476bcf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.542912 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-config\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.543862 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7429ac3-1745-4020-8463-6dd90982a5f3-config-volume\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.550326 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-serving-cert\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: 
\"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.557350 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-trusted-ca\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.565826 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/00290c40-68ba-4728-b5fe-3ff985476bcf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.567233 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b1ad66e1-e4b8-416f-b7e8-2b965c866123-profile-collector-cert\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.569423 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/167b672c-2aed-451c-8117-e723c8cd10b5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.569818 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" event={"ID":"aa0c5241-c63d-4e8d-9001-3cf220a3182e","Type":"ContainerStarted","Data":"92063a3d311c8033366be84a5a81e9223470432f83043acb275306d5a277cb62"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.580626 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a7429ac3-1745-4020-8463-6dd90982a5f3-metrics-tls\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.584289 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/df51fff3-849a-4fa7-976d-e54f548af4b7-apiservice-cert\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.585014 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84b2b582-7e6f-491e-8164-beb11b7ee33c-cert\") pod \"ingress-canary-wkqff\" (UID: \"84b2b582-7e6f-491e-8164-beb11b7ee33c\") " pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.590997 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-tls\") pod \"image-registry-697d97f7c8-lzb22\" (UID: 
\"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.591566 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b1ad66e1-e4b8-416f-b7e8-2b965c866123-srv-cert\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.592132 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/167b672c-2aed-451c-8117-e723c8cd10b5-srv-cert\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.594443 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/36e6e320-57c7-44aa-ab53-4cdea9183752-node-bootstrap-token\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.595158 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/960a6573-c6e4-49fe-9aff-0c9b43435215-secret-volume\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.596214 4809 generic.go:334] "Generic (PLEG): container finished" podID="5c458d1d-1777-46d0-a001-ebb0cae0e16f" containerID="10dd01fe9fd234774168fdb83bb6284773e17ce8d9b7d4c8fda4c29d7b8f7e9f" exitCode=0 Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.596299 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" event={"ID":"5c458d1d-1777-46d0-a001-ebb0cae0e16f","Type":"ContainerDied","Data":"10dd01fe9fd234774168fdb83bb6284773e17ce8d9b7d4c8fda4c29d7b8f7e9f"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.596362 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/df51fff3-849a-4fa7-976d-e54f548af4b7-webhook-cert\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.607105 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" event={"ID":"b86e98a3-9c3b-4d66-b71c-535882b764d3","Type":"ContainerStarted","Data":"4861f18e82c5fc144603ed9c4c7a55c23830e8d445b66a04f75e4051478dd40c"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.613817 4809 generic.go:334] "Generic (PLEG): container finished" podID="6977dfac-797f-4284-ab89-48d3f92f332d" containerID="0b572394c4f6c89566a8acde13a552aa32ec1df10f144edbbea0eaa96b602647" exitCode=0 Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.613895 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" 
event={"ID":"6977dfac-797f-4284-ab89-48d3f92f332d","Type":"ContainerDied","Data":"0b572394c4f6c89566a8acde13a552aa32ec1df10f144edbbea0eaa96b602647"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.617340 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5dc9\" (UniqueName: \"kubernetes.io/projected/df51fff3-849a-4fa7-976d-e54f548af4b7-kube-api-access-d5dc9\") pod \"packageserver-d55dfcdfc-m5cr8\" (UID: \"df51fff3-849a-4fa7-976d-e54f548af4b7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.621196 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm65p\" (UniqueName: \"kubernetes.io/projected/b1ad66e1-e4b8-416f-b7e8-2b965c866123-kube-api-access-qm65p\") pod \"catalog-operator-68c6474976-xpwlj\" (UID: \"b1ad66e1-e4b8-416f-b7e8-2b965c866123\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.627051 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" event={"ID":"164d8b36-2115-4fd0-8404-9aec777ad0fa","Type":"ContainerStarted","Data":"9c768334f0c754081b4ca10acc16c9495a9bbe69296eb8974bd6209009f82236"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.627092 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" event={"ID":"164d8b36-2115-4fd0-8404-9aec777ad0fa","Type":"ContainerStarted","Data":"e75167dd44776c2a9eb56c238924d2a33c9e880df31191773139edd8b057d7a8"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.631572 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdkgb\" (UniqueName: \"kubernetes.io/projected/84b2b582-7e6f-491e-8164-beb11b7ee33c-kube-api-access-fdkgb\") pod \"ingress-canary-wkqff\" (UID: \"84b2b582-7e6f-491e-8164-beb11b7ee33c\") " pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634197 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634253 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-mountpoint-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634288 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-plugins-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634333 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-registration-dir\") pod 
\"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634368 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-socket-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634395 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psws6\" (UniqueName: \"kubernetes.io/projected/413c828b-94ff-4866-a5ed-a533cf34a56a-kube-api-access-psws6\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634538 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-csi-data-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634702 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-csi-data-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.634764 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-mountpoint-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: E0930 00:11:31.634839 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.134822182 +0000 UTC m=+143.171071590 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.635018 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-plugins-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.635282 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-registration-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.635299 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqxlc\" (UniqueName: \"kubernetes.io/projected/3a06ac13-0534-4f4a-a6d0-62cb79f6e512-kube-api-access-kqxlc\") pod \"service-ca-operator-777779d784-m9zbh\" (UID: \"3a06ac13-0534-4f4a-a6d0-62cb79f6e512\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.635349 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/413c828b-94ff-4866-a5ed-a533cf34a56a-socket-dir\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.645930 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" event={"ID":"5211fa59-5683-47eb-bfa9-100d466cd1d5","Type":"ContainerStarted","Data":"952f0a909c48c771bd482b6b592c00985ea19a27b08fbb0ade5472bd962f80e1"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.646406 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.651942 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" event={"ID":"7239c02e-65ba-4a23-bb2e-ba8a6d39a142","Type":"ContainerStarted","Data":"34d1d14bf9df58b98b91aa3dda00faef7a630687fa221c226b67d436d44daa7d"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.651981 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" event={"ID":"7239c02e-65ba-4a23-bb2e-ba8a6d39a142","Type":"ContainerStarted","Data":"7054d940f5f4d534160c9da031f4d02e8d6304c344d6da17c22201590ca0d387"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.651990 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" 
event={"ID":"7239c02e-65ba-4a23-bb2e-ba8a6d39a142","Type":"ContainerStarted","Data":"0083df9e427b8ed70c85553374b678d2da0754521106ca8b84193c934809d129"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.655424 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-794c8\" (UniqueName: \"kubernetes.io/projected/a7429ac3-1745-4020-8463-6dd90982a5f3-kube-api-access-794c8\") pod \"dns-default-cz7bk\" (UID: \"a7429ac3-1745-4020-8463-6dd90982a5f3\") " pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.669038 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" event={"ID":"ca4b56e9-c63b-41a3-8182-90019963009f","Type":"ContainerStarted","Data":"3760fe80f85c5b98b77c5e6e46fb784e9024944cb36e54ce8f0720ba87be3be6"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.669110 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" event={"ID":"ca4b56e9-c63b-41a3-8182-90019963009f","Type":"ContainerStarted","Data":"562e68bc76b463dec898895f39d3e96cd92ed528f4656b7a3032ef9b09098b7e"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.670603 4809 generic.go:334] "Generic (PLEG): container finished" podID="60c02e8f-1cb7-402b-a065-2f3b207cf60b" containerID="9a9c9f968d72f09613601c59c691c3eb0177f78785893347ccd55e699a5a240d" exitCode=0 Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.670733 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-962sz" event={"ID":"60c02e8f-1cb7-402b-a065-2f3b207cf60b","Type":"ContainerDied","Data":"9a9c9f968d72f09613601c59c691c3eb0177f78785893347ccd55e699a5a240d"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.670751 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-962sz" event={"ID":"60c02e8f-1cb7-402b-a065-2f3b207cf60b","Type":"ContainerStarted","Data":"6f5e7a5a0547a56567af5bcfb0a2ac445c76d4e957ec4dbe9a93d68b5c735a80"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.672471 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" event={"ID":"49e2a7e3-f8ad-4c5a-8616-fca0daa506df","Type":"ContainerStarted","Data":"aaa86c70f1077dc194b1d7e85e6b9c0a486ee0d389dbcb632edd39652f6be682"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.672543 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" event={"ID":"49e2a7e3-f8ad-4c5a-8616-fca0daa506df","Type":"ContainerStarted","Data":"cc4eac297754e7b5abb54d79e616969cc7523e09632d96f3637457e848746f13"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.674007 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" event={"ID":"d4f92806-fad5-406a-92ba-e668b4e9cede","Type":"ContainerStarted","Data":"b32c561dc3970365bf94b59db94ead7fc5cd60a168f606acb4ccb3fdf775b4c4"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.678803 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gjq9t" event={"ID":"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05","Type":"ContainerStarted","Data":"5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec"} Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.679352 4809 
patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lgzpz container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.679402 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.684237 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp6bf\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-kube-api-access-vp6bf\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.705211 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-bound-sa-token\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.727155 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.730484 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmmrc\" (UniqueName: \"kubernetes.io/projected/167b672c-2aed-451c-8117-e723c8cd10b5-kube-api-access-fmmrc\") pod \"olm-operator-6b444d44fb-f6dzz\" (UID: \"167b672c-2aed-451c-8117-e723c8cd10b5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.735155 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:31 crc kubenswrapper[4809]: E0930 00:11:31.737682 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.237631651 +0000 UTC m=+143.273881059 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.752815 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.763117 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrdtc\" (UniqueName: \"kubernetes.io/projected/36e6e320-57c7-44aa-ab53-4cdea9183752-kube-api-access-nrdtc\") pod \"machine-config-server-2f5wv\" (UID: \"36e6e320-57c7-44aa-ab53-4cdea9183752\") " pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.763751 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2g9s\" (UniqueName: \"kubernetes.io/projected/960a6573-c6e4-49fe-9aff-0c9b43435215-kube-api-access-l2g9s\") pod \"collect-profiles-29319840-mxs7v\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.772360 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:31 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:31 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:31 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.772413 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.781031 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.788402 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.798840 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.807669 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-wkqff" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.810060 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psws6\" (UniqueName: \"kubernetes.io/projected/413c828b-94ff-4866-a5ed-a533cf34a56a-kube-api-access-psws6\") pod \"csi-hostpathplugin-7ct6f\" (UID: \"413c828b-94ff-4866-a5ed-a533cf34a56a\") " pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.815783 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-2f5wv" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.824996 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.841295 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:31 crc kubenswrapper[4809]: E0930 00:11:31.841716 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.341704387 +0000 UTC m=+143.377953795 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.851994 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.968734 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:31 crc kubenswrapper[4809]: E0930 00:11:31.969257 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.469232625 +0000 UTC m=+143.505482033 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:31 crc kubenswrapper[4809]: I0930 00:11:31.986793 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.043508 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q89hc"] Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.069966 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.070331 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.570318991 +0000 UTC m=+143.606568399 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.171568 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.172110 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.672095599 +0000 UTC m=+143.708345007 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.219482 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4"] Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.273846 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.275523 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.775505766 +0000 UTC m=+143.811755174 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: W0930 00:11:32.303344 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc05dd83a_3728_4184_aad5_666bd79967a5.slice/crio-8052841563b71ac6c100651c4b163a506db0adc81cdd30d65aadb5339405b974 WatchSource:0}: Error finding container 8052841563b71ac6c100651c4b163a506db0adc81cdd30d65aadb5339405b974: Status 404 returned error can't find the container with id 8052841563b71ac6c100651c4b163a506db0adc81cdd30d65aadb5339405b974 Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.351817 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s"] Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.351870 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bc4wl"] Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.378669 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv"] Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.382378 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.382814 4809 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.88279725 +0000 UTC m=+143.919046658 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.484031 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.484328 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:32.984317179 +0000 UTC m=+144.020566587 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.593014 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.593569 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.093544952 +0000 UTC m=+144.129794360 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.648574 4809 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-pzxnq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.648976 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.662371 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-gjq9t" podStartSLOduration=121.662330109 podStartE2EDuration="2m1.662330109s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:32.6584024 +0000 UTC m=+143.694651808" watchObservedRunningTime="2025-09-30 00:11:32.662330109 +0000 UTC m=+143.698579507" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.686500 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" podStartSLOduration=121.686481751 podStartE2EDuration="2m1.686481751s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:32.685713878 +0000 UTC m=+143.721963286" watchObservedRunningTime="2025-09-30 00:11:32.686481751 +0000 UTC m=+143.722731159" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.695490 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.695817 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.195805855 +0000 UTC m=+144.232055263 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.702094 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" event={"ID":"49e2a7e3-f8ad-4c5a-8616-fca0daa506df","Type":"ContainerStarted","Data":"76955c505eaac59b201091eb05382577cf69f11a0b06921298cecfde8cd5c63d"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.712294 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bc4wl" event={"ID":"64add8f2-553b-45fe-84d6-6123c46532c3","Type":"ContainerStarted","Data":"4eb5e2e8f0779f7b1fddba79720bf78cdac84346be7cc48bc345cd02d4bc11b2"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.718212 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" event={"ID":"e1fec829-b315-4468-9181-f36c2ee983b6","Type":"ContainerStarted","Data":"3a32034c76bf8560ff6675dfb39b873c39215c86a10c0dd885c08b441054040a"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.720320 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-q89hc" event={"ID":"c05dd83a-3728-4184-aad5-666bd79967a5","Type":"ContainerStarted","Data":"8052841563b71ac6c100651c4b163a506db0adc81cdd30d65aadb5339405b974"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.725196 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" event={"ID":"4d857620-6a88-49db-86a6-0f2cf373d0e8","Type":"ContainerStarted","Data":"c4a53e79a0b193750b009f3abd7174f161f49e7d761e062db9c1c004b2d87ac1"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.748213 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-2f5wv" event={"ID":"36e6e320-57c7-44aa-ab53-4cdea9183752","Type":"ContainerStarted","Data":"3a9b6e0bb7f470e3308d67d3b6392814df77e6b3098dcb6ff739ec6a0816a612"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.766329 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" event={"ID":"6977dfac-797f-4284-ab89-48d3f92f332d","Type":"ContainerStarted","Data":"6ecf915fb4a6261a7e7f3168defe92139f1a26c44a87164ded344d09070aeb74"} Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.766377 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.796137 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.797373 4809 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.297326744 +0000 UTC m=+144.333576152 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.835619 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:32 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:32 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:32 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.835719 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.897832 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:32 crc kubenswrapper[4809]: E0930 00:11:32.904736 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.404709671 +0000 UTC m=+144.440959259 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.951680 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:11:32 crc kubenswrapper[4809]: I0930 00:11:32.972794 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" podStartSLOduration=121.972778276 podStartE2EDuration="2m1.972778276s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:32.972219718 +0000 UTC m=+144.008469126" watchObservedRunningTime="2025-09-30 00:11:32.972778276 +0000 UTC m=+144.009027684" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.004369 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.004880 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.504859179 +0000 UTC m=+144.541108587 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.018894 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f"] Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.020121 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-4hg76" podStartSLOduration=122.020096202 podStartE2EDuration="2m2.020096202s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.019279646 +0000 UTC m=+144.055529054" watchObservedRunningTime="2025-09-30 00:11:33.020096202 +0000 UTC m=+144.056345610" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.083837 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq"] Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.133066 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.133941 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.633923584 +0000 UTC m=+144.670172992 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.159748 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bpgds" podStartSLOduration=122.141109712 podStartE2EDuration="2m2.141109712s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.13609348 +0000 UTC m=+144.172342888" watchObservedRunningTime="2025-09-30 00:11:33.141109712 +0000 UTC m=+144.177359120" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.235064 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.235288 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.735240437 +0000 UTC m=+144.771489855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.236188 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.236717 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.736691981 +0000 UTC m=+144.772941389 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.259607 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-62g6c" podStartSLOduration=122.259583315 podStartE2EDuration="2m2.259583315s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.256373798 +0000 UTC m=+144.292623206" watchObservedRunningTime="2025-09-30 00:11:33.259583315 +0000 UTC m=+144.295832723" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.309598 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jqgvc" podStartSLOduration=122.309578712 podStartE2EDuration="2m2.309578712s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.307149488 +0000 UTC m=+144.343398896" watchObservedRunningTime="2025-09-30 00:11:33.309578712 +0000 UTC m=+144.345828120" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.337187 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.337558 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.83754106 +0000 UTC m=+144.873790458 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.349188 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-zlcqw" podStartSLOduration=122.349163453 podStartE2EDuration="2m2.349163453s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.348006528 +0000 UTC m=+144.384255936" watchObservedRunningTime="2025-09-30 00:11:33.349163453 +0000 UTC m=+144.385412861" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.438522 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.438881 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:33.938867944 +0000 UTC m=+144.975117352 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.442253 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9sszs" podStartSLOduration=122.442227105 podStartE2EDuration="2m2.442227105s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.379810622 +0000 UTC m=+144.416060040" watchObservedRunningTime="2025-09-30 00:11:33.442227105 +0000 UTC m=+144.478476513" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.453097 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-m4bcx" podStartSLOduration=122.453068594 podStartE2EDuration="2m2.453068594s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.440499024 +0000 UTC m=+144.476748432" watchObservedRunningTime="2025-09-30 00:11:33.453068594 +0000 UTC m=+144.489318002" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.540152 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.540839 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.040822676 +0000 UTC m=+145.077072084 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.613076 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29319840-n77h7" podStartSLOduration=122.613056847 podStartE2EDuration="2m2.613056847s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.613044647 +0000 UTC m=+144.649294055" watchObservedRunningTime="2025-09-30 00:11:33.613056847 +0000 UTC m=+144.649306255" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.643025 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.645053 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.145035248 +0000 UTC m=+145.181284656 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.669840 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-5gnw9" podStartSLOduration=122.669825439 podStartE2EDuration="2m2.669825439s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:33.667977373 +0000 UTC m=+144.704226781" watchObservedRunningTime="2025-09-30 00:11:33.669825439 +0000 UTC m=+144.706074847" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.732095 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd"] Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.745321 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.746058 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.246011811 +0000 UTC m=+145.282261219 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.777737 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:33 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:33 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:33 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.777804 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.785665 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k"] Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.798256 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-q89hc" event={"ID":"c05dd83a-3728-4184-aad5-666bd79967a5","Type":"ContainerStarted","Data":"cff9f27e67238c2ccf7c2bc9f9025a243860c4505153e0822673f083627c0366"} Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.799020 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.822710 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-clhm9"] Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.849756 4809 patch_prober.go:28] interesting pod/console-operator-58897d9998-q89hc container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/readyz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.849851 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-q89hc" podUID="c05dd83a-3728-4184-aad5-666bd79967a5" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/readyz\": dial tcp 10.217.0.18:8443: connect: connection refused" Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.850051 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" event={"ID":"4d857620-6a88-49db-86a6-0f2cf373d0e8","Type":"ContainerStarted","Data":"15d4bfce955195537dd4f5f1fc0a4bb92c1fb09bcb8fb8b089c3fe2c7c09f6f8"} Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.852111 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.852540 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.352526392 +0000 UTC m=+145.388775800 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.864513 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" event={"ID":"0e942a3a-1bca-4fc8-865c-37899588219b","Type":"ContainerStarted","Data":"14efc09928821e3dfaf916fb6ebcea7ac44becaafd2445e49f1e9cf964db2e68"} Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.940957 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-2f5wv" event={"ID":"36e6e320-57c7-44aa-ab53-4cdea9183752","Type":"ContainerStarted","Data":"a05d87eb9627b3fec243b41b51117f7c905f62dd971cf11390e4b08f6ec5817d"} Sep 30 00:11:33 crc kubenswrapper[4809]: I0930 00:11:33.963907 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:33 crc kubenswrapper[4809]: E0930 00:11:33.980091 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.480035539 +0000 UTC m=+145.516284947 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.006896 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" event={"ID":"5c458d1d-1777-46d0-a001-ebb0cae0e16f","Type":"ContainerStarted","Data":"13ccc5d99dc9913c7fb3efc6e63b05bfd7bc3f0dfc5912b6c4a6730d09deaeff"} Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.013774 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" podStartSLOduration=123.013751421 podStartE2EDuration="2m3.013751421s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.006524163 +0000 UTC m=+145.042773571" watchObservedRunningTime="2025-09-30 00:11:34.013751421 +0000 UTC m=+145.050000829" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.015154 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" event={"ID":"e1fec829-b315-4468-9181-f36c2ee983b6","Type":"ContainerStarted","Data":"132255a41751c50f9eef1e34b32e5b31206b4aebdf69f83815e70c84148e1652"} Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.051226 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" event={"ID":"4e3be393-d661-43f7-ba25-80e0e84b8b72","Type":"ContainerStarted","Data":"de1d73e506ddfa9c4622280462abc68429c3fec82ae3e679956dcf1f1e036f17"} Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.067995 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.072848 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.074318 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.574304729 +0000 UTC m=+145.610554137 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.095902 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9kv9s" podStartSLOduration=123.095882393 podStartE2EDuration="2m3.095882393s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.087013574 +0000 UTC m=+145.123262982" watchObservedRunningTime="2025-09-30 00:11:34.095882393 +0000 UTC m=+145.132131801" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.096370 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.096422 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-962sz" event={"ID":"60c02e8f-1cb7-402b-a065-2f3b207cf60b","Type":"ContainerStarted","Data":"21ca27988dc83dfe63c0debfb9d2e5932ba2789fb9273da2d7956513efabbb72"} Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.105498 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.123128 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bc4wl" event={"ID":"64add8f2-553b-45fe-84d6-6123c46532c3","Type":"ContainerStarted","Data":"240c8d5215b7f84cbdc0a11f86c5d4020b35933fd0cf2e2833bf926e477b0075"} Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.124139 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.160998 4809 patch_prober.go:28] interesting pod/downloads-7954f5f757-bc4wl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.161049 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bc4wl" podUID="64add8f2-553b-45fe-84d6-6123c46532c3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.161567 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" event={"ID":"81d9a2da-ae5c-437c-9460-eef4019785df","Type":"ContainerStarted","Data":"26efed200d5ba5f407f41a0f6d80f13fed43317cd51543f67adbf7bd8eb524f1"} Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.185195 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.186530 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.686510112 +0000 UTC m=+145.722759520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.187428 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-q89hc" podStartSLOduration=123.187412399 podStartE2EDuration="2m3.187412399s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.186633946 +0000 UTC m=+145.222883364" watchObservedRunningTime="2025-09-30 00:11:34.187412399 +0000 UTC m=+145.223661807" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.189177 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" podStartSLOduration=123.189169703 podStartE2EDuration="2m3.189169703s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.117723305 +0000 UTC m=+145.153972713" watchObservedRunningTime="2025-09-30 00:11:34.189169703 +0000 UTC m=+145.225419111" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.197778 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kdnqq"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.231402 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.260147 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.261025 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r67k2" podStartSLOduration=123.261015072 podStartE2EDuration="2m3.261015072s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.242748488 +0000 UTC m=+145.278997916" watchObservedRunningTime="2025-09-30 00:11:34.261015072 +0000 UTC m=+145.297264480" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.288822 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.292838 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.792821497 +0000 UTC m=+145.829070905 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.300688 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-wkqff"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.301495 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-2f5wv" podStartSLOduration=6.301463299 podStartE2EDuration="6.301463299s" podCreationTimestamp="2025-09-30 00:11:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.282602297 +0000 UTC m=+145.318851715" watchObservedRunningTime="2025-09-30 00:11:34.301463299 +0000 UTC m=+145.337712717" Sep 30 00:11:34 crc kubenswrapper[4809]: W0930 00:11:34.347880 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84b2b582_7e6f_491e_8164_beb11b7ee33c.slice/crio-39654a4588b2b2a75f8481c37877b3e5cff503c502149da6e7e190d731063bd7 WatchSource:0}: Error finding container 39654a4588b2b2a75f8481c37877b3e5cff503c502149da6e7e190d731063bd7: Status 404 returned error can't find the container with id 39654a4588b2b2a75f8481c37877b3e5cff503c502149da6e7e190d731063bd7 Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.370319 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4cfbv" podStartSLOduration=123.370279237 podStartE2EDuration="2m3.370279237s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.343244296 +0000 UTC m=+145.379493704" watchObservedRunningTime="2025-09-30 00:11:34.370279237 +0000 UTC m=+145.406528645" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.391970 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.392255 4809 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:34.892237922 +0000 UTC m=+145.928487330 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.435186 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-9j5tx"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.459256 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.459755 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.503944 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.505451 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.505861 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.005845439 +0000 UTC m=+146.042094847 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.535471 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-bc4wl" podStartSLOduration=123.535428415 podStartE2EDuration="2m3.535428415s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.384134556 +0000 UTC m=+145.420383964" watchObservedRunningTime="2025-09-30 00:11:34.535428415 +0000 UTC m=+145.571677823" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.553355 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" podStartSLOduration=123.553336269 podStartE2EDuration="2m3.553336269s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.422361966 +0000 UTC m=+145.458611374" watchObservedRunningTime="2025-09-30 00:11:34.553336269 +0000 UTC m=+145.589585677" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.585698 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cz7bk"] Sep 30 00:11:34 crc kubenswrapper[4809]: W0930 00:11:34.590907 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod167b672c_2aed_451c_8117_e723c8cd10b5.slice/crio-0fc3a9ef7609022b0aa2de7683a5bd0ad144b98819b27ea30b538f6e15eaee45 WatchSource:0}: Error finding container 0fc3a9ef7609022b0aa2de7683a5bd0ad144b98819b27ea30b538f6e15eaee45: Status 404 returned error can't find the container with id 0fc3a9ef7609022b0aa2de7683a5bd0ad144b98819b27ea30b538f6e15eaee45 Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.594198 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz"] Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.606991 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" podStartSLOduration=123.606953015 podStartE2EDuration="2m3.606953015s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:34.468177515 +0000 UTC m=+145.504426923" watchObservedRunningTime="2025-09-30 00:11:34.606953015 +0000 UTC m=+145.643202423" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.614929 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:34 crc kubenswrapper[4809]: 
E0930 00:11:34.616462 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.116421652 +0000 UTC m=+146.152671060 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.616614 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.619102 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.119092843 +0000 UTC m=+146.155342251 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.663446 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7ct6f"] Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.718436 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.218414376 +0000 UTC m=+146.254663784 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.718949 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.719154 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.719502 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.219492379 +0000 UTC m=+146.255741787 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.773528 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:34 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:34 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:34 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.773604 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.821053 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.821191 4809 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.321163002 +0000 UTC m=+146.357412410 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.821672 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.822191 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.322147393 +0000 UTC m=+146.358396791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.923475 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.924003 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.423971351 +0000 UTC m=+146.460220779 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.924111 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:34 crc kubenswrapper[4809]: E0930 00:11:34.924471 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.424438635 +0000 UTC m=+146.460688043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.953632 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:34 crc kubenswrapper[4809]: I0930 00:11:34.953696 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.024710 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.025268 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.525252363 +0000 UTC m=+146.561501771 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.134316 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.134780 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.634764325 +0000 UTC m=+146.671013733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.233105 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" event={"ID":"a2031b74-3b79-422f-86c0-f4ddad880624","Type":"ContainerStarted","Data":"8b1ee7022b12206627a743bf33ad87260170b00c7bca0b11491a16740a4af81d"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.233179 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" event={"ID":"a2031b74-3b79-422f-86c0-f4ddad880624","Type":"ContainerStarted","Data":"591503d8ac43105918e45e43a5148a402b9db326a1b123c73c6a288b932888c5"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.235162 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.235746 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.735726147 +0000 UTC m=+146.771975565 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.260849 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-962sz" event={"ID":"60c02e8f-1cb7-402b-a065-2f3b207cf60b","Type":"ContainerStarted","Data":"8c01888850f267a491e2fa573ca86a2aefb01f5ec5771670cc98e423d1f5a59a"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.281903 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" event={"ID":"ef8fb279-c7ac-480d-a945-d498ec0f523a","Type":"ContainerStarted","Data":"2168ad31a9c84066a6bc8b86fb087885341aae5e063c7cc1e30962422064ef37"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.328486 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" event={"ID":"0e942a3a-1bca-4fc8-865c-37899588219b","Type":"ContainerStarted","Data":"8acfa179b5373c829bda3819d352747304e805185bd9bed04e9d447063380874"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.334677 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" event={"ID":"e8ab8a4b-c99d-4063-8791-33eecc55996e","Type":"ContainerStarted","Data":"0a83b9b8be832b32e8eef705b71d1e14259d94ad07c877c0d46e0b2d8e37d9bd"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.341027 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.343770 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.843754005 +0000 UTC m=+146.880003413 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.362770 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" event={"ID":"167b672c-2aed-451c-8117-e723c8cd10b5","Type":"ContainerStarted","Data":"0fc3a9ef7609022b0aa2de7683a5bd0ad144b98819b27ea30b538f6e15eaee45"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.385399 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" event={"ID":"81d9a2da-ae5c-437c-9460-eef4019785df","Type":"ContainerStarted","Data":"9fba56f60cfd73e23044f8c33800c3c3792aad0f11662df157fe7e3372e72b97"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.385462 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" event={"ID":"81d9a2da-ae5c-437c-9460-eef4019785df","Type":"ContainerStarted","Data":"0f46700ccd02b41ec2b3d362d873036f284e7985508705e582ba137f6b5d4574"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.406767 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" event={"ID":"df51fff3-849a-4fa7-976d-e54f548af4b7","Type":"ContainerStarted","Data":"903cc010fcc5f4512404569dc38e57c77cd20da6e832df3e02c5cecd07fc142c"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.433896 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" event={"ID":"a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb","Type":"ContainerStarted","Data":"0b72008e7fab81527d1082670deaac61912943ab003f4e43b43c3c7318b5f756"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.443559 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.444120 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:35.944091358 +0000 UTC m=+146.980340766 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.521858 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-962sz" podStartSLOduration=124.521842007 podStartE2EDuration="2m4.521842007s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.519769484 +0000 UTC m=+146.556018892" watchObservedRunningTime="2025-09-30 00:11:35.521842007 +0000 UTC m=+146.558091415" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.521945 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-wkqff" event={"ID":"84b2b582-7e6f-491e-8164-beb11b7ee33c","Type":"ContainerStarted","Data":"39654a4588b2b2a75f8481c37877b3e5cff503c502149da6e7e190d731063bd7"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.544927 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.545822 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.045805133 +0000 UTC m=+147.082054541 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.576200 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qmgd4" podStartSLOduration=124.576179685 podStartE2EDuration="2m4.576179685s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.565001466 +0000 UTC m=+146.601250874" watchObservedRunningTime="2025-09-30 00:11:35.576179685 +0000 UTC m=+146.612429093" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.587812 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" event={"ID":"960a6573-c6e4-49fe-9aff-0c9b43435215","Type":"ContainerStarted","Data":"6d7dd9f6c354897bac28cd7a05f7e0268563c3e4e352b32d4fa07a27c419c0b0"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.597486 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cz7bk" event={"ID":"a7429ac3-1745-4020-8463-6dd90982a5f3","Type":"ContainerStarted","Data":"91bab0ac15cc0196c5a4cf518d56874c1a985d47bcc3d1b67000f483e23eeda5"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.599633 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.643307 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" event={"ID":"5f0e64f9-08d4-4599-8731-cb62b636de47","Type":"ContainerStarted","Data":"22860ed7a4650bc76e808c8b6c3dc0e701fe9e082553a904509607aad759610a"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.644718 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" event={"ID":"5f0e64f9-08d4-4599-8731-cb62b636de47","Type":"ContainerStarted","Data":"f4701b0938963763880f2b770bda205c04356f00f3ac019520ffadf354159f08"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.661311 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.663695 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zz87f" podStartSLOduration=124.663676639 podStartE2EDuration="2m4.663676639s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 00:11:35.651620493 +0000 UTC m=+146.687869901" watchObservedRunningTime="2025-09-30 00:11:35.663676639 +0000 UTC m=+146.699926047" Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.666906 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.166880606 +0000 UTC m=+147.203130014 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.683562 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" event={"ID":"3a06ac13-0534-4f4a-a6d0-62cb79f6e512","Type":"ContainerStarted","Data":"fd7f8b87eb1b07adafbadffab1d5037a4f7b5e21705b65f303d88715d4f7e43b"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.742392 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f9r9k" podStartSLOduration=124.742363485 podStartE2EDuration="2m4.742363485s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.703070603 +0000 UTC m=+146.739320011" watchObservedRunningTime="2025-09-30 00:11:35.742363485 +0000 UTC m=+146.778612893" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.763907 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5t4kq" event={"ID":"4e3be393-d661-43f7-ba25-80e0e84b8b72","Type":"ContainerStarted","Data":"58a43cd4aa9b446f3f28ef1da3b0f369695a2f4673412f469852a458fd05c87e"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.763976 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" event={"ID":"413c828b-94ff-4866-a5ed-a533cf34a56a","Type":"ContainerStarted","Data":"a2f44d18c938908b4607bd18050c8fcd4404206638c2bbb991ec13b57b114770"} Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.771860 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.27184201 +0000 UTC m=+147.308091418 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.771291 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.773756 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:35 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:35 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:35 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.773816 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.786561 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" event={"ID":"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3","Type":"ContainerStarted","Data":"ee227e6afab1adb894b6d2b3a1d33fd18fc9dd6946825912ef3d6f35ce3f3a41"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.826465 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" event={"ID":"edff5a66-7ade-44f3-a770-62a25d56e674","Type":"ContainerStarted","Data":"7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.826558 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" event={"ID":"edff5a66-7ade-44f3-a770-62a25d56e674","Type":"ContainerStarted","Data":"aa412240d9904248be6cb342e7aafbe041cd5613ada4c31664e5a8f89d1bef81"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.827137 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.846918 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" event={"ID":"11bd97c9-d0d7-4758-b3af-3386ebf8bb69","Type":"ContainerStarted","Data":"b93c192186bf909565340fa1bc03660ae2d9e64541dc1eb29a70915e7373122c"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.846983 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" 
event={"ID":"11bd97c9-d0d7-4758-b3af-3386ebf8bb69","Type":"ContainerStarted","Data":"8b43b242d3697a362fe2d699ef31a86689cce3af5b6a8cc3c55519bb61544ade"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.847868 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rnr6q" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.850972 4809 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-clhm9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.851025 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.857745 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" podStartSLOduration=124.857723964 podStartE2EDuration="2m4.857723964s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.856316602 +0000 UTC m=+146.892566020" watchObservedRunningTime="2025-09-30 00:11:35.857723964 +0000 UTC m=+146.893973372" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.860372 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7zskd" podStartSLOduration=124.860363364 podStartE2EDuration="2m4.860363364s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.801698725 +0000 UTC m=+146.837948143" watchObservedRunningTime="2025-09-30 00:11:35.860363364 +0000 UTC m=+146.896612762" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.858660 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" event={"ID":"65ff30ff-b242-4857-9927-d8ab7620dd46","Type":"ContainerStarted","Data":"c8883e40b243c9e0d610a2fad81721519c1b758ce45cfe40efb61b371ca9112d"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.874594 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.876960 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.376935417 +0000 UTC m=+147.413184825 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.886871 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" event={"ID":"b1ad66e1-e4b8-416f-b7e8-2b965c866123","Type":"ContainerStarted","Data":"5e4d20b42d37d19d6acbbe18b2c31b2dc9e5b3867a842fe99549a73cc12e93a0"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.886917 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" event={"ID":"b1ad66e1-e4b8-416f-b7e8-2b965c866123","Type":"ContainerStarted","Data":"6839a3faa86de417ddbff76dcc382cc5d6d2d5c301d2bc15b1cfa8f284e866d9"} Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.886939 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.897786 4809 patch_prober.go:28] interesting pod/downloads-7954f5f757-bc4wl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.897847 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bc4wl" podUID="64add8f2-553b-45fe-84d6-6123c46532c3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.897927 4809 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-xpwlj container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" start-of-body= Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.897942 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" podUID="b1ad66e1-e4b8-416f-b7e8-2b965c866123" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.905902 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jm5f4" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.951103 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" podStartSLOduration=124.951081537 podStartE2EDuration="2m4.951081537s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.914632361 +0000 UTC m=+146.950881769" 
watchObservedRunningTime="2025-09-30 00:11:35.951081537 +0000 UTC m=+146.987330945" Sep 30 00:11:35 crc kubenswrapper[4809]: I0930 00:11:35.985987 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:35 crc kubenswrapper[4809]: E0930 00:11:35.987030 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.487010997 +0000 UTC m=+147.523260405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.039542 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" podStartSLOduration=125.039516019 podStartE2EDuration="2m5.039516019s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:35.960214214 +0000 UTC m=+146.996463622" watchObservedRunningTime="2025-09-30 00:11:36.039516019 +0000 UTC m=+147.075765427" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.089411 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.091501 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.591466715 +0000 UTC m=+147.627716123 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.190841 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.191555 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.691541761 +0000 UTC m=+147.727791169 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.294772 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.295389 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.795343599 +0000 UTC m=+147.831593007 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.297656 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-q89hc" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.396963 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.397311 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.897297482 +0000 UTC m=+147.933546890 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.498408 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.498635 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.998614245 +0000 UTC m=+148.034863653 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.498776 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.499151 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:36.99913983 +0000 UTC m=+148.035389238 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.599544 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.599746 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.099714061 +0000 UTC m=+148.135963469 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.599945 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.600361 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.100346371 +0000 UTC m=+148.136595779 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.700959 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.701164 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.201131218 +0000 UTC m=+148.237380636 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.702906 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.703419 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.203406276 +0000 UTC m=+148.239655694 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.768715 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:36 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:36 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:36 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.768799 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.803870 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.804118 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.30408256 +0000 UTC m=+148.340331978 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.804824 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.805257 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.305245616 +0000 UTC m=+148.341495224 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.894138 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" event={"ID":"3a06ac13-0534-4f4a-a6d0-62cb79f6e512","Type":"ContainerStarted","Data":"21b6e0e5abfc5501d9c304677f28e59aa44cbf64af384d293ac4e879139eb2fd"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.897243 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" event={"ID":"960a6573-c6e4-49fe-9aff-0c9b43435215","Type":"ContainerStarted","Data":"8758d9baefb97fe777fe1c85b8a5e1569b491929745518fe92cb5ac1e892ea06"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.899796 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" event={"ID":"e8ab8a4b-c99d-4063-8791-33eecc55996e","Type":"ContainerStarted","Data":"fab77b9e038d68e4fbf09c7c5cc5678632a795a5f05d2daa4a3ed4dfc625bc91"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.899830 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" event={"ID":"e8ab8a4b-c99d-4063-8791-33eecc55996e","Type":"ContainerStarted","Data":"219531454f119e1e5ca3a6a9ddca0711e1fc60511beccc6f475578cd689cee0a"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.899926 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.902217 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cz7bk" 
event={"ID":"a7429ac3-1745-4020-8463-6dd90982a5f3","Type":"ContainerStarted","Data":"2f8517d24a5a1c007b21014a8917acf5bcb4c36a15cc37d0d96d99012b4f58c3"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.902489 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cz7bk" event={"ID":"a7429ac3-1745-4020-8463-6dd90982a5f3","Type":"ContainerStarted","Data":"5f55269fda6bbfeced9a8eac6f9c498496f2184f80405a00009b22e1bbda8721"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.902588 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.904483 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" event={"ID":"11bd97c9-d0d7-4758-b3af-3386ebf8bb69","Type":"ContainerStarted","Data":"3793731b76f52a1f05ee135a61cf3926a83c4c21b836de7fa9aa9b83757dc593"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.905321 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.905431 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.405406324 +0000 UTC m=+148.441655772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.905531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:36 crc kubenswrapper[4809]: E0930 00:11:36.910689 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.410669923 +0000 UTC m=+148.446919331 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.927295 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-m9zbh" podStartSLOduration=125.927266707 podStartE2EDuration="2m5.927266707s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:36.924133302 +0000 UTC m=+147.960382710" watchObservedRunningTime="2025-09-30 00:11:36.927266707 +0000 UTC m=+147.963516115" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.929037 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6r6lv" event={"ID":"65ff30ff-b242-4857-9927-d8ab7620dd46","Type":"ContainerStarted","Data":"489af7aae74da39b56865f897aed44c47b01bce60b173e3563385c19d6eda781"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.953022 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" event={"ID":"ef8fb279-c7ac-480d-a945-d498ec0f523a","Type":"ContainerStarted","Data":"f20cff14b642ed73dc5a2c4ee174713b571cb6a40d22bb1b8da9ba90be7ebe40"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.962021 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" podStartSLOduration=125.961995061 podStartE2EDuration="2m5.961995061s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:36.961268248 +0000 UTC m=+147.997517656" watchObservedRunningTime="2025-09-30 00:11:36.961995061 +0000 UTC m=+147.998244469" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.970763 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" event={"ID":"a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb","Type":"ContainerStarted","Data":"11f968f86e8ac1450003b2f629aac3fb94d03776e095ec0fb7424ec9d81c1c9a"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.971143 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" event={"ID":"a8337228-dd2f-4b8d-9f5f-6bc4d9f9adeb","Type":"ContainerStarted","Data":"37986b28bbb59cbb850a9b579a3f77e00476747114bae855468cea3dc872bbff"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.980493 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" event={"ID":"167b672c-2aed-451c-8117-e723c8cd10b5","Type":"ContainerStarted","Data":"0aebdddba0712d1fa4dd28d8c3ce65d046fe088ed9fe6823fc4cf3efd857d83b"} Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.982437 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:36 crc kubenswrapper[4809]: I0930 00:11:36.992972 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" event={"ID":"413c828b-94ff-4866-a5ed-a533cf34a56a","Type":"ContainerStarted","Data":"a7c7b91e9fcf348e3855b81e5816fa08268668e53775c96e35b0de63eb3f2432"} Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.007420 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" event={"ID":"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3","Type":"ContainerStarted","Data":"d77ce9057eac66c2c6c4f1b58c677adc5d1c54377614b8a532cbb4388c0f50b4"} Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.007484 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" event={"ID":"137cc61b-f88a-4501-a3ba-9ebc5c1fc1c3","Type":"ContainerStarted","Data":"afc72831a43103d38bf513467725ec3abe8bba515ba850b60f276f6f60411b17"} Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.004319 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cbvmm" podStartSLOduration=126.004290054 podStartE2EDuration="2m6.004290054s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:36.986110502 +0000 UTC m=+148.022359920" watchObservedRunningTime="2025-09-30 00:11:37.004290054 +0000 UTC m=+148.040539462" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.012887 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.013794 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.015005 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.514973008 +0000 UTC m=+148.551222416 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.017848 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.019502 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" podStartSLOduration=126.019481994 podStartE2EDuration="2m6.019481994s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.018870006 +0000 UTC m=+148.055119414" watchObservedRunningTime="2025-09-30 00:11:37.019481994 +0000 UTC m=+148.055731402" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.019588 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.519567287 +0000 UTC m=+148.555816695 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.032053 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-wkqff" event={"ID":"84b2b582-7e6f-491e-8164-beb11b7ee33c","Type":"ContainerStarted","Data":"6eb29f6a01eb457f337bb699026a8a0113e1363528aae4e63ca000e4e99548ea"} Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.053853 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-cz7bk" podStartSLOduration=9.053820585 podStartE2EDuration="9.053820585s" podCreationTimestamp="2025-09-30 00:11:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.045296747 +0000 UTC m=+148.081546145" watchObservedRunningTime="2025-09-30 00:11:37.053820585 +0000 UTC m=+148.090069993" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.056483 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" event={"ID":"df51fff3-849a-4fa7-976d-e54f548af4b7","Type":"ContainerStarted","Data":"aa0e62e00c0ea9faab63aebe2e1cc6ff034cfbd5687fec28a8558562fe95b4df"} Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.063308 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.057012 4809 patch_prober.go:28] interesting pod/downloads-7954f5f757-bc4wl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.063546 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bc4wl" podUID="64add8f2-553b-45fe-84d6-6123c46532c3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.057343 4809 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-clhm9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.064163 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.071828 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-xpwlj" Sep 
30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.104508 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-wkqff" podStartSLOduration=9.104490563 podStartE2EDuration="9.104490563s" podCreationTimestamp="2025-09-30 00:11:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.079687841 +0000 UTC m=+148.115937249" watchObservedRunningTime="2025-09-30 00:11:37.104490563 +0000 UTC m=+148.140739971" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.104776 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-kdnqq" podStartSLOduration=126.104772512 podStartE2EDuration="2m6.104772512s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.102168413 +0000 UTC m=+148.138417831" watchObservedRunningTime="2025-09-30 00:11:37.104772512 +0000 UTC m=+148.141021920" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.120314 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.121925 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.621893171 +0000 UTC m=+148.658142579 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.158984 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-9j5tx" podStartSLOduration=126.158963545 podStartE2EDuration="2m6.158963545s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.129665037 +0000 UTC m=+148.165914445" watchObservedRunningTime="2025-09-30 00:11:37.158963545 +0000 UTC m=+148.195212953" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.159813 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f6dzz" podStartSLOduration=126.15980716 podStartE2EDuration="2m6.15980716s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.157197611 +0000 UTC m=+148.193447019" watchObservedRunningTime="2025-09-30 00:11:37.15980716 +0000 UTC m=+148.196056568" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.197097 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sz2pd" podStartSLOduration=126.197077832 podStartE2EDuration="2m6.197077832s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.195963687 +0000 UTC m=+148.232213095" watchObservedRunningTime="2025-09-30 00:11:37.197077832 +0000 UTC m=+148.233327240" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.224095 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.224614 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.724594845 +0000 UTC m=+148.760844253 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.279264 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" podStartSLOduration=126.279245234 podStartE2EDuration="2m6.279245234s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:37.277322335 +0000 UTC m=+148.313571733" watchObservedRunningTime="2025-09-30 00:11:37.279245234 +0000 UTC m=+148.315494642" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.324745 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.325087 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.825070603 +0000 UTC m=+148.861320011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.426922 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.427346 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:37.927328746 +0000 UTC m=+148.963578154 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.528798 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.528943 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.528982 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.028954248 +0000 UTC m=+149.065203656 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.529088 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.529287 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.529493 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.029474494 +0000 UTC m=+149.065723902 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.529984 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.537992 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.627199 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.630889 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.631054 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.131024454 +0000 UTC m=+149.167273862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.631171 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.631213 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.631249 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.631766 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.131738446 +0000 UTC m=+149.167987854 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.635501 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.635926 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.732818 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.232801822 +0000 UTC m=+149.269051230 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.732815 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.733136 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.733444 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.233436721 +0000 UTC m=+149.269686129 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.781814 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:37 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:37 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:37 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.782129 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.839236 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.839477 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.339443496 +0000 UTC m=+149.375692914 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.853903 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-m5cr8" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.911010 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.922171 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:37 crc kubenswrapper[4809]: I0930 00:11:37.941874 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:37 crc kubenswrapper[4809]: E0930 00:11:37.942449 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.44243226 +0000 UTC m=+149.478681668 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.042861 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.043168 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.543152856 +0000 UTC m=+149.579402264 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.104534 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" event={"ID":"413c828b-94ff-4866-a5ed-a533cf34a56a","Type":"ContainerStarted","Data":"686c66f5cfd7cca9de5ab9e812627939a2e62fde86a57d3c9e8f56d6fd0c38c1"} Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.133219 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.159812 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.160309 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.660293318 +0000 UTC m=+149.696542726 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.217405 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j2ssl"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.237498 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j2ssl"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.237626 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.245883 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.260579 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.260945 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-utilities\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.261035 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-catalog-content\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.261064 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhw6l\" (UniqueName: \"kubernetes.io/projected/9c28e622-d89b-4819-b346-5cc07af83b8b-kube-api-access-hhw6l\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.261198 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.761177318 +0000 UTC m=+149.797426726 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.363243 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.363290 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-catalog-content\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.363311 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhw6l\" (UniqueName: \"kubernetes.io/projected/9c28e622-d89b-4819-b346-5cc07af83b8b-kube-api-access-hhw6l\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.363360 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-utilities\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.363786 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-utilities\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.364027 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.864014888 +0000 UTC m=+149.900264296 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.364369 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-catalog-content\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.385775 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hjqb8"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.387940 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hjqb8"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.391130 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.395803 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.396588 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhw6l\" (UniqueName: \"kubernetes.io/projected/9c28e622-d89b-4819-b346-5cc07af83b8b-kube-api-access-hhw6l\") pod \"community-operators-j2ssl\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.464298 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.464504 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-catalog-content\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.464570 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-utilities\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.464593 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmvk6\" (UniqueName: \"kubernetes.io/projected/00358083-724a-47cb-b376-1cc3d97a9fab-kube-api-access-nmvk6\") pod \"certified-operators-hjqb8\" (UID: 
\"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.464738 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:38.964721853 +0000 UTC m=+150.000971261 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.568175 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-catalog-content\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.568914 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.568967 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-utilities\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.568991 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmvk6\" (UniqueName: \"kubernetes.io/projected/00358083-724a-47cb-b376-1cc3d97a9fab-kube-api-access-nmvk6\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.569445 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-catalog-content\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.569733 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-utilities\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.570915 4809 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:39.070894403 +0000 UTC m=+150.107144021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.579210 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xqgvp"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.580525 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.597699 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmvk6\" (UniqueName: \"kubernetes.io/projected/00358083-724a-47cb-b376-1cc3d97a9fab-kube-api-access-nmvk6\") pod \"certified-operators-hjqb8\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.600896 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqgvp"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.630116 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.670116 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.670254 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-utilities\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.670292 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knn2p\" (UniqueName: \"kubernetes.io/projected/852e34d6-9b62-4f31-b9b2-78caac40540d-kube-api-access-knn2p\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.670346 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:39.170323399 +0000 UTC m=+150.206572807 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.670381 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-catalog-content\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.723612 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.767473 4809 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.768563 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:38 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:38 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:38 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.768631 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.769776 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7sd88"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.771122 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.771177 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-utilities\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.771235 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.771271 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knn2p\" (UniqueName: \"kubernetes.io/projected/852e34d6-9b62-4f31-b9b2-78caac40540d-kube-api-access-knn2p\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.771339 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-catalog-content\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.771707 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 00:11:39.271693364 +0000 UTC m=+150.307942772 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lzb22" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.771871 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-catalog-content\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.773627 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-utilities\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.785606 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7sd88"] Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.805685 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knn2p\" (UniqueName: \"kubernetes.io/projected/852e34d6-9b62-4f31-b9b2-78caac40540d-kube-api-access-knn2p\") pod \"community-operators-xqgvp\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.861751 4809 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T00:11:38.767501817Z","Handler":null,"Name":""} Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.873002 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.873323 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-catalog-content\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.873367 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-utilities\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.873386 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf7x7\" (UniqueName: 
\"kubernetes.io/projected/7b54e11f-446c-47f6-9a5c-5a15853df320-kube-api-access-hf7x7\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: E0930 00:11:38.873457 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 00:11:39.37344031 +0000 UTC m=+150.409689718 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.880283 4809 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.880322 4809 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.920717 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.974846 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-utilities\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.974884 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf7x7\" (UniqueName: \"kubernetes.io/projected/7b54e11f-446c-47f6-9a5c-5a15853df320-kube-api-access-hf7x7\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.974937 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.975021 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-catalog-content\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.975304 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-utilities\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.975404 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-catalog-content\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.979870 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 00:11:38 crc kubenswrapper[4809]: I0930 00:11:38.979937 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.001703 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf7x7\" (UniqueName: \"kubernetes.io/projected/7b54e11f-446c-47f6-9a5c-5a15853df320-kube-api-access-hf7x7\") pod \"certified-operators-7sd88\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.011921 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lzb22\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.027504 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j2ssl"] Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.078905 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.091571 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hjqb8"] Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.095488 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.110333 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.138659 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" event={"ID":"413c828b-94ff-4866-a5ed-a533cf34a56a","Type":"ContainerStarted","Data":"2d55a4bb09ad0b1c74352e027fabc53ee0714655c6d448ea3c67738ad54d56e1"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.142818 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2ssl" event={"ID":"9c28e622-d89b-4819-b346-5cc07af83b8b","Type":"ContainerStarted","Data":"8083ab2b46eebd1a24130e29c9f0760ae16a7c0c51f264d9dffe59683520393c"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.155820 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a624d2fcd1baebf806dee2999e7c261648a8d475bf5f1b0785fb4d19f2435422"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.155857 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0f8ede2346bb5a356b482c17e5a9ede34e137106ec80236df9761404b01baab8"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.156617 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.160931 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7aa87a97bf8ab60d41a1b694af67d0591673f2499a5894cc57036bbc78105abc"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.160973 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"33a5622db77c581c14e95a435961c12564795ef2d183e2242e3c355780c1cbd4"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.162515 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hjqb8" event={"ID":"00358083-724a-47cb-b376-1cc3d97a9fab","Type":"ContainerStarted","Data":"7169470348fee3b9451cd697565912d96b15faae1ae205536f427001c6498704"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.165704 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"eaa695ea079d7b79ee690d5372548a6833395a8ca4837cebf8cd94a0554dfbc3"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.165765 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ba00d08337ea77331a10fd38aa2546ef5df5fc67f2d2136311790635c9d77036"} Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.252423 4809 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/community-operators-xqgvp"] Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.269277 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.391901 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7sd88"] Sep 30 00:11:39 crc kubenswrapper[4809]: W0930 00:11:39.413443 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b54e11f_446c_47f6_9a5c_5a15853df320.slice/crio-fa7a9f1394d6dcabf049111d0c97cbcf60b99a527ceaee01a9b596f73c183c37 WatchSource:0}: Error finding container fa7a9f1394d6dcabf049111d0c97cbcf60b99a527ceaee01a9b596f73c183c37: Status 404 returned error can't find the container with id fa7a9f1394d6dcabf049111d0c97cbcf60b99a527ceaee01a9b596f73c183c37 Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.510993 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lzb22"] Sep 30 00:11:39 crc kubenswrapper[4809]: W0930 00:11:39.574334 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00290c40_68ba_4728_b5fe_3ff985476bcf.slice/crio-c41804cb9eb9030d516ad4aa8fe3d7b8ee5b9dab0d303cfbdf00e9182e6ee1a5 WatchSource:0}: Error finding container c41804cb9eb9030d516ad4aa8fe3d7b8ee5b9dab0d303cfbdf00e9182e6ee1a5: Status 404 returned error can't find the container with id c41804cb9eb9030d516ad4aa8fe3d7b8ee5b9dab0d303cfbdf00e9182e6ee1a5 Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.586891 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.699874 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.762826 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.766370 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:39 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:39 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:39 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.766443 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.776256 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.777210 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 
00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.779352 4809 patch_prober.go:28] interesting pod/console-f9d7485db-gjq9t container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.779402 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-gjq9t" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerName="console" probeResult="failure" output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.815688 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.817485 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.819258 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.820610 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.823627 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.824183 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.826126 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.841109 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.891041 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa609401-afb6-40f1-adf0-a1a314f101fb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.891161 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa609401-afb6-40f1-adf0-a1a314f101fb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.992802 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa609401-afb6-40f1-adf0-a1a314f101fb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.992938 4809 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa609401-afb6-40f1-adf0-a1a314f101fb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:39 crc kubenswrapper[4809]: I0930 00:11:39.993089 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa609401-afb6-40f1-adf0-a1a314f101fb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.012907 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa609401-afb6-40f1-adf0-a1a314f101fb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.146862 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.172212 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerID="d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d" exitCode=0 Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.172293 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2ssl" event={"ID":"9c28e622-d89b-4819-b346-5cc07af83b8b","Type":"ContainerDied","Data":"d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.173971 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.182446 4809 generic.go:334] "Generic (PLEG): container finished" podID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerID="570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052" exitCode=0 Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.182537 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerDied","Data":"570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.182569 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerStarted","Data":"fa7a9f1394d6dcabf049111d0c97cbcf60b99a527ceaee01a9b596f73c183c37"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.186215 4809 generic.go:334] "Generic (PLEG): container finished" podID="00358083-724a-47cb-b376-1cc3d97a9fab" containerID="6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960" exitCode=0 Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.186298 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hjqb8" event={"ID":"00358083-724a-47cb-b376-1cc3d97a9fab","Type":"ContainerDied","Data":"6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.193270 4809 generic.go:334] "Generic 
(PLEG): container finished" podID="960a6573-c6e4-49fe-9aff-0c9b43435215" containerID="8758d9baefb97fe777fe1c85b8a5e1569b491929745518fe92cb5ac1e892ea06" exitCode=0 Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.193403 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" event={"ID":"960a6573-c6e4-49fe-9aff-0c9b43435215","Type":"ContainerDied","Data":"8758d9baefb97fe777fe1c85b8a5e1569b491929745518fe92cb5ac1e892ea06"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.201852 4809 generic.go:334] "Generic (PLEG): container finished" podID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerID="1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906" exitCode=0 Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.201954 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqgvp" event={"ID":"852e34d6-9b62-4f31-b9b2-78caac40540d","Type":"ContainerDied","Data":"1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.201982 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqgvp" event={"ID":"852e34d6-9b62-4f31-b9b2-78caac40540d","Type":"ContainerStarted","Data":"65f5ccbd351d56ebfbdd8ad9f2cc96d0831bcc008821120ce43f443be0881254"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.214155 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" event={"ID":"00290c40-68ba-4728-b5fe-3ff985476bcf","Type":"ContainerStarted","Data":"d20fef6941a572fa3b135c63bf568331e85d4e4292ab2ce69fec9a1bd4205b23"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.214236 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" event={"ID":"00290c40-68ba-4728-b5fe-3ff985476bcf","Type":"ContainerStarted","Data":"c41804cb9eb9030d516ad4aa8fe3d7b8ee5b9dab0d303cfbdf00e9182e6ee1a5"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.215264 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.238236 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" event={"ID":"413c828b-94ff-4866-a5ed-a533cf34a56a","Type":"ContainerStarted","Data":"405111bfb0d07e656649874319a271d25d03755925930777be76d305dc779b6c"} Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.258745 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-962sz" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.316124 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" podStartSLOduration=129.316106651 podStartE2EDuration="2m9.316106651s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:40.313129751 +0000 UTC m=+151.349379159" watchObservedRunningTime="2025-09-30 00:11:40.316106651 +0000 UTC m=+151.352356059" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.383764 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l74tx"] Sep 30 00:11:40 
crc kubenswrapper[4809]: I0930 00:11:40.385635 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.389016 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.391369 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7ct6f" podStartSLOduration=12.391343573 podStartE2EDuration="12.391343573s" podCreationTimestamp="2025-09-30 00:11:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:40.381442022 +0000 UTC m=+151.417691430" watchObservedRunningTime="2025-09-30 00:11:40.391343573 +0000 UTC m=+151.427592981" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.421852 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l74tx"] Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.504328 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-utilities\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.504404 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb576\" (UniqueName: \"kubernetes.io/projected/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-kube-api-access-hb576\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.504488 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-catalog-content\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.605726 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-catalog-content\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.609020 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-utilities\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.609164 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb576\" (UniqueName: \"kubernetes.io/projected/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-kube-api-access-hb576\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 
00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.609223 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-catalog-content\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.609468 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-utilities\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.629909 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.633311 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb576\" (UniqueName: \"kubernetes.io/projected/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-kube-api-access-hb576\") pod \"redhat-marketplace-l74tx\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.756155 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.768312 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:40 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:40 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:40 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.768377 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.769840 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mhgdc"] Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.771274 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.787466 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhgdc"] Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.913593 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrmh2\" (UniqueName: \"kubernetes.io/projected/799d1bcb-02dd-4597-a154-3d3ce7b877db-kube-api-access-vrmh2\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.913690 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-utilities\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:40 crc kubenswrapper[4809]: I0930 00:11:40.913743 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-catalog-content\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.015557 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrmh2\" (UniqueName: \"kubernetes.io/projected/799d1bcb-02dd-4597-a154-3d3ce7b877db-kube-api-access-vrmh2\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.016036 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-utilities\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.016054 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-catalog-content\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.016553 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-utilities\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.016599 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-catalog-content\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.032387 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-l74tx"] Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.036899 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrmh2\" (UniqueName: \"kubernetes.io/projected/799d1bcb-02dd-4597-a154-3d3ce7b877db-kube-api-access-vrmh2\") pod \"redhat-marketplace-mhgdc\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.137901 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.248767 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aa609401-afb6-40f1-adf0-a1a314f101fb","Type":"ContainerStarted","Data":"9579cf78021822101f1b087aeff95af914a30bbe6a85232ac052d4cf8ec66ffd"} Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.248810 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aa609401-afb6-40f1-adf0-a1a314f101fb","Type":"ContainerStarted","Data":"0f2f57f04dab7717809e805dec80d131779d57e460944c04a2be4b94a8ab5b1e"} Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.250788 4809 generic.go:334] "Generic (PLEG): container finished" podID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerID="54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565" exitCode=0 Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.250935 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerDied","Data":"54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565"} Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.251015 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerStarted","Data":"072b5de555a53a1641e40b0fbf3bb5c38e268082de986ff06e74a47c943cc429"} Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.268663 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.268618893 podStartE2EDuration="2.268618893s" podCreationTimestamp="2025-09-30 00:11:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:41.26653749 +0000 UTC m=+152.302786908" watchObservedRunningTime="2025-09-30 00:11:41.268618893 +0000 UTC m=+152.304868311" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.339517 4809 patch_prober.go:28] interesting pod/downloads-7954f5f757-bc4wl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.339583 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-bc4wl" podUID="64add8f2-553b-45fe-84d6-6123c46532c3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.342605 
4809 patch_prober.go:28] interesting pod/downloads-7954f5f757-bc4wl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.342682 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bc4wl" podUID="64add8f2-553b-45fe-84d6-6123c46532c3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.378977 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8w9pr"] Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.386899 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8w9pr"] Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.387122 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.391240 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.432387 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhgdc"] Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.528294 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-utilities\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.528362 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbgvx\" (UniqueName: \"kubernetes.io/projected/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-kube-api-access-cbgvx\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.528393 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-catalog-content\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.553971 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.629097 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/960a6573-c6e4-49fe-9aff-0c9b43435215-secret-volume\") pod \"960a6573-c6e4-49fe-9aff-0c9b43435215\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.630268 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/960a6573-c6e4-49fe-9aff-0c9b43435215-config-volume\") pod \"960a6573-c6e4-49fe-9aff-0c9b43435215\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.630363 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2g9s\" (UniqueName: \"kubernetes.io/projected/960a6573-c6e4-49fe-9aff-0c9b43435215-kube-api-access-l2g9s\") pod \"960a6573-c6e4-49fe-9aff-0c9b43435215\" (UID: \"960a6573-c6e4-49fe-9aff-0c9b43435215\") " Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.630585 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbgvx\" (UniqueName: \"kubernetes.io/projected/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-kube-api-access-cbgvx\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.630630 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-catalog-content\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.630855 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-utilities\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.631321 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-catalog-content\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.631701 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/960a6573-c6e4-49fe-9aff-0c9b43435215-config-volume" (OuterVolumeSpecName: "config-volume") pod "960a6573-c6e4-49fe-9aff-0c9b43435215" (UID: "960a6573-c6e4-49fe-9aff-0c9b43435215"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.632447 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-utilities\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.635813 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/960a6573-c6e4-49fe-9aff-0c9b43435215-kube-api-access-l2g9s" (OuterVolumeSpecName: "kube-api-access-l2g9s") pod "960a6573-c6e4-49fe-9aff-0c9b43435215" (UID: "960a6573-c6e4-49fe-9aff-0c9b43435215"). InnerVolumeSpecName "kube-api-access-l2g9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.643348 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/960a6573-c6e4-49fe-9aff-0c9b43435215-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "960a6573-c6e4-49fe-9aff-0c9b43435215" (UID: "960a6573-c6e4-49fe-9aff-0c9b43435215"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.650909 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbgvx\" (UniqueName: \"kubernetes.io/projected/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-kube-api-access-cbgvx\") pod \"redhat-operators-8w9pr\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.706623 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.732658 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2g9s\" (UniqueName: \"kubernetes.io/projected/960a6573-c6e4-49fe-9aff-0c9b43435215-kube-api-access-l2g9s\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.732686 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/960a6573-c6e4-49fe-9aff-0c9b43435215-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.732695 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/960a6573-c6e4-49fe-9aff-0c9b43435215-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.769207 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:41 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:41 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:41 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.769282 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.787951 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r48zg"] Sep 30 00:11:41 crc kubenswrapper[4809]: E0930 00:11:41.788170 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="960a6573-c6e4-49fe-9aff-0c9b43435215" containerName="collect-profiles" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.788181 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="960a6573-c6e4-49fe-9aff-0c9b43435215" containerName="collect-profiles" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.788283 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="960a6573-c6e4-49fe-9aff-0c9b43435215" containerName="collect-profiles" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.789003 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.803224 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r48zg"] Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.935723 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwqrg\" (UniqueName: \"kubernetes.io/projected/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-kube-api-access-xwqrg\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.935973 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-utilities\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:41 crc kubenswrapper[4809]: I0930 00:11:41.936019 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-catalog-content\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.041723 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-catalog-content\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.042208 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwqrg\" (UniqueName: \"kubernetes.io/projected/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-kube-api-access-xwqrg\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.042260 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-utilities\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.042875 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-utilities\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.052238 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-catalog-content\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.060791 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xwqrg\" (UniqueName: \"kubernetes.io/projected/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-kube-api-access-xwqrg\") pod \"redhat-operators-r48zg\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.128533 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.132266 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8w9pr"] Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.146259 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.147207 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.159111 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.159405 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 00:11:42 crc kubenswrapper[4809]: W0930 00:11:42.164868 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5f8e57a_8ee9_4443_a31f_221ed6ad3ad2.slice/crio-786100e9867dcf415b524ae36826841a74a3c4bb9f11fb48cd64050d22827bd2 WatchSource:0}: Error finding container 786100e9867dcf415b524ae36826841a74a3c4bb9f11fb48cd64050d22827bd2: Status 404 returned error can't find the container with id 786100e9867dcf415b524ae36826841a74a3c4bb9f11fb48cd64050d22827bd2 Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.164878 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.246578 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59c82a04-264a-4f2f-b830-b8559fd743d6-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.246623 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59c82a04-264a-4f2f-b830-b8559fd743d6-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.265620 4809 generic.go:334] "Generic (PLEG): container finished" podID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerID="3d3520904d40e4be1c67765b1675fc02569bb73d570f0c600e93df73862c9a5a" exitCode=0 Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.265708 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhgdc" event={"ID":"799d1bcb-02dd-4597-a154-3d3ce7b877db","Type":"ContainerDied","Data":"3d3520904d40e4be1c67765b1675fc02569bb73d570f0c600e93df73862c9a5a"} Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.265774 4809 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-marketplace-mhgdc" event={"ID":"799d1bcb-02dd-4597-a154-3d3ce7b877db","Type":"ContainerStarted","Data":"bb392c35b9fe79a27120f08d4e07a92cc77bd83485a726096e37a9bfc7d8ca62"} Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.268086 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" event={"ID":"960a6573-c6e4-49fe-9aff-0c9b43435215","Type":"ContainerDied","Data":"6d7dd9f6c354897bac28cd7a05f7e0268563c3e4e352b32d4fa07a27c419c0b0"} Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.268123 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d7dd9f6c354897bac28cd7a05f7e0268563c3e4e352b32d4fa07a27c419c0b0" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.268100 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.287042 4809 generic.go:334] "Generic (PLEG): container finished" podID="aa609401-afb6-40f1-adf0-a1a314f101fb" containerID="9579cf78021822101f1b087aeff95af914a30bbe6a85232ac052d4cf8ec66ffd" exitCode=0 Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.287126 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aa609401-afb6-40f1-adf0-a1a314f101fb","Type":"ContainerDied","Data":"9579cf78021822101f1b087aeff95af914a30bbe6a85232ac052d4cf8ec66ffd"} Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.301510 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8w9pr" event={"ID":"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2","Type":"ContainerStarted","Data":"786100e9867dcf415b524ae36826841a74a3c4bb9f11fb48cd64050d22827bd2"} Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.347828 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59c82a04-264a-4f2f-b830-b8559fd743d6-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.347886 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59c82a04-264a-4f2f-b830-b8559fd743d6-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.348799 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59c82a04-264a-4f2f-b830-b8559fd743d6-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.369928 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59c82a04-264a-4f2f-b830-b8559fd743d6-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.498731 4809 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.550634 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r48zg"] Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.765868 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:42 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:42 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:42 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.766370 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:42 crc kubenswrapper[4809]: I0930 00:11:42.983220 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 00:11:43 crc kubenswrapper[4809]: W0930 00:11:43.047254 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod59c82a04_264a_4f2f_b830_b8559fd743d6.slice/crio-8e7ad51bf810da444b1682ca8067a31bd0a7eff67db179818390080678c69240 WatchSource:0}: Error finding container 8e7ad51bf810da444b1682ca8067a31bd0a7eff67db179818390080678c69240: Status 404 returned error can't find the container with id 8e7ad51bf810da444b1682ca8067a31bd0a7eff67db179818390080678c69240 Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.311845 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerID="ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa" exitCode=0 Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.311910 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8w9pr" event={"ID":"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2","Type":"ContainerDied","Data":"ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa"} Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.319224 4809 generic.go:334] "Generic (PLEG): container finished" podID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerID="10d8032fee5752edb87df74b2b1e49239d1e05156707b5a6abc2490a5ac648c0" exitCode=0 Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.319296 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerDied","Data":"10d8032fee5752edb87df74b2b1e49239d1e05156707b5a6abc2490a5ac648c0"} Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.319589 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerStarted","Data":"4ae7c2f4ccc900ec397bfc470f8b6eb601e054cac6dfceaf2be7850ab162e1b5"} Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.322759 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"59c82a04-264a-4f2f-b830-b8559fd743d6","Type":"ContainerStarted","Data":"8e7ad51bf810da444b1682ca8067a31bd0a7eff67db179818390080678c69240"} 
Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.742158 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.765820 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:43 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:43 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:43 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.765867 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.872144 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa609401-afb6-40f1-adf0-a1a314f101fb-kubelet-dir\") pod \"aa609401-afb6-40f1-adf0-a1a314f101fb\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.872196 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa609401-afb6-40f1-adf0-a1a314f101fb-kube-api-access\") pod \"aa609401-afb6-40f1-adf0-a1a314f101fb\" (UID: \"aa609401-afb6-40f1-adf0-a1a314f101fb\") " Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.872263 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa609401-afb6-40f1-adf0-a1a314f101fb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "aa609401-afb6-40f1-adf0-a1a314f101fb" (UID: "aa609401-afb6-40f1-adf0-a1a314f101fb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.872444 4809 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa609401-afb6-40f1-adf0-a1a314f101fb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.882434 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa609401-afb6-40f1-adf0-a1a314f101fb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "aa609401-afb6-40f1-adf0-a1a314f101fb" (UID: "aa609401-afb6-40f1-adf0-a1a314f101fb"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:11:43 crc kubenswrapper[4809]: I0930 00:11:43.973240 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa609401-afb6-40f1-adf0-a1a314f101fb-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.336592 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"59c82a04-264a-4f2f-b830-b8559fd743d6","Type":"ContainerStarted","Data":"b0e0ee28cd86f8aa6de90da39efa0e46331b77c92165cc84964df2570b8975be"} Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.353329 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.353307671 podStartE2EDuration="2.353307671s" podCreationTimestamp="2025-09-30 00:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:11:44.349223688 +0000 UTC m=+155.385473096" watchObservedRunningTime="2025-09-30 00:11:44.353307671 +0000 UTC m=+155.389557079" Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.357044 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aa609401-afb6-40f1-adf0-a1a314f101fb","Type":"ContainerDied","Data":"0f2f57f04dab7717809e805dec80d131779d57e460944c04a2be4b94a8ab5b1e"} Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.357085 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f2f57f04dab7717809e805dec80d131779d57e460944c04a2be4b94a8ab5b1e" Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.357106 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.765288 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:44 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:44 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:44 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:44 crc kubenswrapper[4809]: I0930 00:11:44.765374 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:45 crc kubenswrapper[4809]: I0930 00:11:45.364145 4809 generic.go:334] "Generic (PLEG): container finished" podID="59c82a04-264a-4f2f-b830-b8559fd743d6" containerID="b0e0ee28cd86f8aa6de90da39efa0e46331b77c92165cc84964df2570b8975be" exitCode=0 Sep 30 00:11:45 crc kubenswrapper[4809]: I0930 00:11:45.364272 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"59c82a04-264a-4f2f-b830-b8559fd743d6","Type":"ContainerDied","Data":"b0e0ee28cd86f8aa6de90da39efa0e46331b77c92165cc84964df2570b8975be"} Sep 30 00:11:45 crc kubenswrapper[4809]: I0930 00:11:45.765147 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:45 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:45 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:45 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:45 crc kubenswrapper[4809]: I0930 00:11:45.765204 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:46 crc kubenswrapper[4809]: I0930 00:11:46.766398 4809 patch_prober.go:28] interesting pod/router-default-5444994796-5gnw9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 00:11:46 crc kubenswrapper[4809]: [-]has-synced failed: reason withheld Sep 30 00:11:46 crc kubenswrapper[4809]: [+]process-running ok Sep 30 00:11:46 crc kubenswrapper[4809]: healthz check failed Sep 30 00:11:46 crc kubenswrapper[4809]: I0930 00:11:46.766945 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5gnw9" podUID="88723807-e4e6-48ce-9d84-a66a57863496" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 00:11:46 crc kubenswrapper[4809]: I0930 00:11:46.831216 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-cz7bk" Sep 30 00:11:47 crc kubenswrapper[4809]: I0930 00:11:47.765733 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:47 crc 
kubenswrapper[4809]: I0930 00:11:47.767962 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-5gnw9" Sep 30 00:11:49 crc kubenswrapper[4809]: I0930 00:11:49.776457 4809 patch_prober.go:28] interesting pod/console-f9d7485db-gjq9t container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 30 00:11:49 crc kubenswrapper[4809]: I0930 00:11:49.776831 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-gjq9t" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerName="console" probeResult="failure" output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 30 00:11:51 crc kubenswrapper[4809]: I0930 00:11:51.358058 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-bc4wl" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.025481 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.111821 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59c82a04-264a-4f2f-b830-b8559fd743d6-kube-api-access\") pod \"59c82a04-264a-4f2f-b830-b8559fd743d6\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.112074 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59c82a04-264a-4f2f-b830-b8559fd743d6-kubelet-dir\") pod \"59c82a04-264a-4f2f-b830-b8559fd743d6\" (UID: \"59c82a04-264a-4f2f-b830-b8559fd743d6\") " Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.112198 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/59c82a04-264a-4f2f-b830-b8559fd743d6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "59c82a04-264a-4f2f-b830-b8559fd743d6" (UID: "59c82a04-264a-4f2f-b830-b8559fd743d6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.112347 4809 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59c82a04-264a-4f2f-b830-b8559fd743d6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.117514 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59c82a04-264a-4f2f-b830-b8559fd743d6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "59c82a04-264a-4f2f-b830-b8559fd743d6" (UID: "59c82a04-264a-4f2f-b830-b8559fd743d6"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.213822 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59c82a04-264a-4f2f-b830-b8559fd743d6-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.413441 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"59c82a04-264a-4f2f-b830-b8559fd743d6","Type":"ContainerDied","Data":"8e7ad51bf810da444b1682ca8067a31bd0a7eff67db179818390080678c69240"} Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.413476 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e7ad51bf810da444b1682ca8067a31bd0a7eff67db179818390080678c69240" Sep 30 00:11:53 crc kubenswrapper[4809]: I0930 00:11:53.413491 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 00:11:54 crc kubenswrapper[4809]: I0930 00:11:54.333219 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:54 crc kubenswrapper[4809]: I0930 00:11:54.337800 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43a82899-34fa-4d03-9856-7303839926c2-metrics-certs\") pod \"network-metrics-daemon-4ktzq\" (UID: \"43a82899-34fa-4d03-9856-7303839926c2\") " pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:54 crc kubenswrapper[4809]: I0930 00:11:54.605852 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4ktzq" Sep 30 00:11:55 crc kubenswrapper[4809]: I0930 00:11:55.325049 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:11:55 crc kubenswrapper[4809]: I0930 00:11:55.325126 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:11:59 crc kubenswrapper[4809]: I0930 00:11:59.291406 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:12:00 crc kubenswrapper[4809]: I0930 00:12:00.241051 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:12:00 crc kubenswrapper[4809]: I0930 00:12:00.246440 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:12:04 crc kubenswrapper[4809]: I0930 00:12:04.488963 4809 generic.go:334] "Generic (PLEG): container finished" podID="62227ec8-217f-461a-8116-079b9466a726" containerID="8688133dbb6713a4c7e89013d741c1a2f66663972f6ae187033a4320b0caa80e" exitCode=0 Sep 30 00:12:04 crc kubenswrapper[4809]: I0930 00:12:04.489104 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-n77h7" event={"ID":"62227ec8-217f-461a-8116-079b9466a726","Type":"ContainerDied","Data":"8688133dbb6713a4c7e89013d741c1a2f66663972f6ae187033a4320b0caa80e"} Sep 30 00:12:06 crc kubenswrapper[4809]: I0930 00:12:06.811604 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.011391 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kll62\" (UniqueName: \"kubernetes.io/projected/62227ec8-217f-461a-8116-079b9466a726-kube-api-access-kll62\") pod \"62227ec8-217f-461a-8116-079b9466a726\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.011631 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/62227ec8-217f-461a-8116-079b9466a726-serviceca\") pod \"62227ec8-217f-461a-8116-079b9466a726\" (UID: \"62227ec8-217f-461a-8116-079b9466a726\") " Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.012585 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62227ec8-217f-461a-8116-079b9466a726-serviceca" (OuterVolumeSpecName: "serviceca") pod "62227ec8-217f-461a-8116-079b9466a726" (UID: "62227ec8-217f-461a-8116-079b9466a726"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.088426 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62227ec8-217f-461a-8116-079b9466a726-kube-api-access-kll62" (OuterVolumeSpecName: "kube-api-access-kll62") pod "62227ec8-217f-461a-8116-079b9466a726" (UID: "62227ec8-217f-461a-8116-079b9466a726"). InnerVolumeSpecName "kube-api-access-kll62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.116551 4809 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/62227ec8-217f-461a-8116-079b9466a726-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.116681 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kll62\" (UniqueName: \"kubernetes.io/projected/62227ec8-217f-461a-8116-079b9466a726-kube-api-access-kll62\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.517794 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-n77h7" event={"ID":"62227ec8-217f-461a-8116-079b9466a726","Type":"ContainerDied","Data":"8d1a0fa5c90930996c6707f5ec585e90ee21dad11e85104d30c4b9bf6dbf73c2"} Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.517878 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d1a0fa5c90930996c6707f5ec585e90ee21dad11e85104d30c4b9bf6dbf73c2" Sep 30 00:12:07 crc kubenswrapper[4809]: I0930 00:12:07.517989 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-n77h7" Sep 30 00:12:07 crc kubenswrapper[4809]: E0930 00:12:07.910356 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 00:12:07 crc kubenswrapper[4809]: E0930 00:12:07.910528 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hb576,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-l74tx_openshift-marketplace(31398a45-e7dc-4b71-81ac-0fc5c16b9c26): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 00:12:07 crc kubenswrapper[4809]: E0930 00:12:07.911686 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-l74tx" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" Sep 30 00:12:08 crc kubenswrapper[4809]: E0930 00:12:08.532011 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-l74tx" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" Sep 30 00:12:08 crc kubenswrapper[4809]: E0930 00:12:08.593958 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 00:12:08 crc kubenswrapper[4809]: E0930 00:12:08.594106 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nmvk6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hjqb8_openshift-marketplace(00358083-724a-47cb-b376-1cc3d97a9fab): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 00:12:08 crc kubenswrapper[4809]: E0930 00:12:08.595277 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hjqb8" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.139503 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.139733 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hhw6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-j2ssl_openshift-marketplace(9c28e622-d89b-4819-b346-5cc07af83b8b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.141051 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-j2ssl" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.540266 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hjqb8" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.540582 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-j2ssl" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.625778 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.625898 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hf7x7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7sd88_openshift-marketplace(7b54e11f-446c-47f6-9a5c-5a15853df320): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 00:12:09 crc kubenswrapper[4809]: E0930 00:12:09.627192 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7sd88" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" Sep 30 00:12:11 crc kubenswrapper[4809]: I0930 00:12:11.451314 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkmqg" Sep 30 00:12:12 crc kubenswrapper[4809]: E0930 00:12:12.766121 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7sd88" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.185449 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4ktzq"] Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.554208 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" event={"ID":"43a82899-34fa-4d03-9856-7303839926c2","Type":"ContainerStarted","Data":"f32379d3b44ed78c88066da5d1ed13d5c1d59ce05079bbceff327721330b8a4c"} Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.554251 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" event={"ID":"43a82899-34fa-4d03-9856-7303839926c2","Type":"ContainerStarted","Data":"ed9a326740400bdcc961a9476319d5b692fcc547aae679a435254770fd59ec78"} Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.556006 4809 generic.go:334] "Generic (PLEG): container finished" 
podID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerID="89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba" exitCode=0 Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.556078 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8w9pr" event={"ID":"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2","Type":"ContainerDied","Data":"89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba"} Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.559109 4809 generic.go:334] "Generic (PLEG): container finished" podID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerID="6b1f461660a9ac327d1c45fa25c7d4155428312146cdefe87377d3578898267e" exitCode=0 Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.559168 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhgdc" event={"ID":"799d1bcb-02dd-4597-a154-3d3ce7b877db","Type":"ContainerDied","Data":"6b1f461660a9ac327d1c45fa25c7d4155428312146cdefe87377d3578898267e"} Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.563670 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerStarted","Data":"f160d24551e21c61a32643f69ef37926ffe2094f3ee61b4a8affa9ddda8c421d"} Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.566262 4809 generic.go:334] "Generic (PLEG): container finished" podID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerID="b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3" exitCode=0 Sep 30 00:12:13 crc kubenswrapper[4809]: I0930 00:12:13.566298 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqgvp" event={"ID":"852e34d6-9b62-4f31-b9b2-78caac40540d","Type":"ContainerDied","Data":"b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.575760 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqgvp" event={"ID":"852e34d6-9b62-4f31-b9b2-78caac40540d","Type":"ContainerStarted","Data":"6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.577683 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4ktzq" event={"ID":"43a82899-34fa-4d03-9856-7303839926c2","Type":"ContainerStarted","Data":"3099b545467af461fc21348bdd77383862c06438ff6c808d64170ea4bda3de29"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.581941 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8w9pr" event={"ID":"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2","Type":"ContainerStarted","Data":"3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.586461 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhgdc" event={"ID":"799d1bcb-02dd-4597-a154-3d3ce7b877db","Type":"ContainerStarted","Data":"f31fe6fdcc73adbff8a4c7d397a6d916085c05fdc735dd33bcc1d058de1b97a4"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.588623 4809 generic.go:334] "Generic (PLEG): container finished" podID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerID="f160d24551e21c61a32643f69ef37926ffe2094f3ee61b4a8affa9ddda8c421d" exitCode=0 Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.588740 4809 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerDied","Data":"f160d24551e21c61a32643f69ef37926ffe2094f3ee61b4a8affa9ddda8c421d"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.588802 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerStarted","Data":"a6cfb355f82f3095122e0e3b42a435cbdb2856586f704feeeff43b21cd94ae7a"} Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.617626 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xqgvp" podStartSLOduration=2.698004585 podStartE2EDuration="36.617607828s" podCreationTimestamp="2025-09-30 00:11:38 +0000 UTC" firstStartedPulling="2025-09-30 00:11:40.205743154 +0000 UTC m=+151.241992562" lastFinishedPulling="2025-09-30 00:12:14.125346357 +0000 UTC m=+185.161595805" observedRunningTime="2025-09-30 00:12:14.615815234 +0000 UTC m=+185.652064652" watchObservedRunningTime="2025-09-30 00:12:14.617607828 +0000 UTC m=+185.653857236" Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.650438 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r48zg" podStartSLOduration=2.719895578 podStartE2EDuration="33.650421143s" podCreationTimestamp="2025-09-30 00:11:41 +0000 UTC" firstStartedPulling="2025-09-30 00:11:43.320968518 +0000 UTC m=+154.357217926" lastFinishedPulling="2025-09-30 00:12:14.251494083 +0000 UTC m=+185.287743491" observedRunningTime="2025-09-30 00:12:14.648425962 +0000 UTC m=+185.684675370" watchObservedRunningTime="2025-09-30 00:12:14.650421143 +0000 UTC m=+185.686670561" Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.680298 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-4ktzq" podStartSLOduration=163.680275409 podStartE2EDuration="2m43.680275409s" podCreationTimestamp="2025-09-30 00:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:12:14.679759924 +0000 UTC m=+185.716009332" watchObservedRunningTime="2025-09-30 00:12:14.680275409 +0000 UTC m=+185.716524827" Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.702699 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mhgdc" podStartSLOduration=2.888601622 podStartE2EDuration="34.702684919s" podCreationTimestamp="2025-09-30 00:11:40 +0000 UTC" firstStartedPulling="2025-09-30 00:11:42.286316823 +0000 UTC m=+153.322566241" lastFinishedPulling="2025-09-30 00:12:14.10040013 +0000 UTC m=+185.136649538" observedRunningTime="2025-09-30 00:12:14.70106907 +0000 UTC m=+185.737318518" watchObservedRunningTime="2025-09-30 00:12:14.702684919 +0000 UTC m=+185.738934327" Sep 30 00:12:14 crc kubenswrapper[4809]: I0930 00:12:14.724917 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8w9pr" podStartSLOduration=2.997974303 podStartE2EDuration="33.724899923s" podCreationTimestamp="2025-09-30 00:11:41 +0000 UTC" firstStartedPulling="2025-09-30 00:11:43.313998926 +0000 UTC m=+154.350248334" lastFinishedPulling="2025-09-30 00:12:14.040924546 +0000 UTC m=+185.077173954" observedRunningTime="2025-09-30 
00:12:14.722354786 +0000 UTC m=+185.758604214" watchObservedRunningTime="2025-09-30 00:12:14.724899923 +0000 UTC m=+185.761149331" Sep 30 00:12:17 crc kubenswrapper[4809]: I0930 00:12:17.930893 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 00:12:18 crc kubenswrapper[4809]: I0930 00:12:18.921163 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:12:18 crc kubenswrapper[4809]: I0930 00:12:18.921248 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:12:19 crc kubenswrapper[4809]: I0930 00:12:19.161046 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:12:19 crc kubenswrapper[4809]: I0930 00:12:19.702084 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:12:20 crc kubenswrapper[4809]: I0930 00:12:20.758840 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqgvp"] Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.138531 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.139248 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.221077 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.645012 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerID="39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93" exitCode=0 Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.645125 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2ssl" event={"ID":"9c28e622-d89b-4819-b346-5cc07af83b8b","Type":"ContainerDied","Data":"39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93"} Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.648081 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerStarted","Data":"632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b"} Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.648265 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xqgvp" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="registry-server" containerID="cri-o://6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93" gracePeriod=2 Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.727328 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.727406 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.727426 4809 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:12:21 crc kubenswrapper[4809]: I0930 00:12:21.762985 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.129182 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.129781 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.188017 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.230475 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.287000 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-catalog-content\") pod \"852e34d6-9b62-4f31-b9b2-78caac40540d\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.287082 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-utilities\") pod \"852e34d6-9b62-4f31-b9b2-78caac40540d\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.287108 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knn2p\" (UniqueName: \"kubernetes.io/projected/852e34d6-9b62-4f31-b9b2-78caac40540d-kube-api-access-knn2p\") pod \"852e34d6-9b62-4f31-b9b2-78caac40540d\" (UID: \"852e34d6-9b62-4f31-b9b2-78caac40540d\") " Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.289384 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-utilities" (OuterVolumeSpecName: "utilities") pod "852e34d6-9b62-4f31-b9b2-78caac40540d" (UID: "852e34d6-9b62-4f31-b9b2-78caac40540d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.304234 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/852e34d6-9b62-4f31-b9b2-78caac40540d-kube-api-access-knn2p" (OuterVolumeSpecName: "kube-api-access-knn2p") pod "852e34d6-9b62-4f31-b9b2-78caac40540d" (UID: "852e34d6-9b62-4f31-b9b2-78caac40540d"). InnerVolumeSpecName "kube-api-access-knn2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.361203 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "852e34d6-9b62-4f31-b9b2-78caac40540d" (UID: "852e34d6-9b62-4f31-b9b2-78caac40540d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.389023 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.389074 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/852e34d6-9b62-4f31-b9b2-78caac40540d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.389094 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knn2p\" (UniqueName: \"kubernetes.io/projected/852e34d6-9b62-4f31-b9b2-78caac40540d-kube-api-access-knn2p\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.656394 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2ssl" event={"ID":"9c28e622-d89b-4819-b346-5cc07af83b8b","Type":"ContainerStarted","Data":"7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096"} Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.659016 4809 generic.go:334] "Generic (PLEG): container finished" podID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerID="632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b" exitCode=0 Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.659120 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerDied","Data":"632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b"} Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.664328 4809 generic.go:334] "Generic (PLEG): container finished" podID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerID="6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93" exitCode=0 Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.665546 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xqgvp" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.666213 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqgvp" event={"ID":"852e34d6-9b62-4f31-b9b2-78caac40540d","Type":"ContainerDied","Data":"6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93"} Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.666294 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqgvp" event={"ID":"852e34d6-9b62-4f31-b9b2-78caac40540d","Type":"ContainerDied","Data":"65f5ccbd351d56ebfbdd8ad9f2cc96d0831bcc008821120ce43f443be0881254"} Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.666340 4809 scope.go:117] "RemoveContainer" containerID="6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.686996 4809 scope.go:117] "RemoveContainer" containerID="b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.688406 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j2ssl" podStartSLOduration=2.7234729570000002 podStartE2EDuration="44.688360598s" podCreationTimestamp="2025-09-30 00:11:38 +0000 UTC" firstStartedPulling="2025-09-30 00:11:40.173746792 +0000 UTC m=+151.209996200" lastFinishedPulling="2025-09-30 00:12:22.138634433 +0000 UTC m=+193.174883841" observedRunningTime="2025-09-30 00:12:22.676924081 +0000 UTC m=+193.713173509" watchObservedRunningTime="2025-09-30 00:12:22.688360598 +0000 UTC m=+193.724610006" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.716388 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqgvp"] Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.728247 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xqgvp"] Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.739489 4809 scope.go:117] "RemoveContainer" containerID="1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.741239 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.745413 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.795102 4809 scope.go:117] "RemoveContainer" containerID="6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93" Sep 30 00:12:22 crc kubenswrapper[4809]: E0930 00:12:22.795500 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93\": container with ID starting with 6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93 not found: ID does not exist" containerID="6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.795525 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93"} err="failed to get container status 
\"6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93\": rpc error: code = NotFound desc = could not find container \"6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93\": container with ID starting with 6933371c49ac319fc926eafb7c9cfc6d365fc9427f99503347cd1ca64a496e93 not found: ID does not exist" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.795566 4809 scope.go:117] "RemoveContainer" containerID="b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3" Sep 30 00:12:22 crc kubenswrapper[4809]: E0930 00:12:22.795793 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3\": container with ID starting with b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3 not found: ID does not exist" containerID="b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.795807 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3"} err="failed to get container status \"b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3\": rpc error: code = NotFound desc = could not find container \"b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3\": container with ID starting with b01835613d5aa95439fa1eb28dd99fbb57a748ef1c67f6975461382f12b9b8f3 not found: ID does not exist" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.795819 4809 scope.go:117] "RemoveContainer" containerID="1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906" Sep 30 00:12:22 crc kubenswrapper[4809]: E0930 00:12:22.795981 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906\": container with ID starting with 1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906 not found: ID does not exist" containerID="1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.795995 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906"} err="failed to get container status \"1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906\": rpc error: code = NotFound desc = could not find container \"1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906\": container with ID starting with 1ebf9a9ac0e26466ce68d52dce9498e508c99ad02e5ac09927568e53bbc8b906 not found: ID does not exist" Sep 30 00:12:22 crc kubenswrapper[4809]: I0930 00:12:22.958963 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhgdc"] Sep 30 00:12:23 crc kubenswrapper[4809]: I0930 00:12:23.676420 4809 generic.go:334] "Generic (PLEG): container finished" podID="00358083-724a-47cb-b376-1cc3d97a9fab" containerID="bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015" exitCode=0 Sep 30 00:12:23 crc kubenswrapper[4809]: I0930 00:12:23.676514 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hjqb8" event={"ID":"00358083-724a-47cb-b376-1cc3d97a9fab","Type":"ContainerDied","Data":"bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015"} Sep 
30 00:12:23 crc kubenswrapper[4809]: I0930 00:12:23.681343 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerStarted","Data":"ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853"} Sep 30 00:12:23 crc kubenswrapper[4809]: I0930 00:12:23.699867 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" path="/var/lib/kubelet/pods/852e34d6-9b62-4f31-b9b2-78caac40540d/volumes" Sep 30 00:12:23 crc kubenswrapper[4809]: I0930 00:12:23.724046 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l74tx" podStartSLOduration=1.854163025 podStartE2EDuration="43.724024573s" podCreationTimestamp="2025-09-30 00:11:40 +0000 UTC" firstStartedPulling="2025-09-30 00:11:41.252438632 +0000 UTC m=+152.288688040" lastFinishedPulling="2025-09-30 00:12:23.12230018 +0000 UTC m=+194.158549588" observedRunningTime="2025-09-30 00:12:23.722968891 +0000 UTC m=+194.759218299" watchObservedRunningTime="2025-09-30 00:12:23.724024573 +0000 UTC m=+194.760273981" Sep 30 00:12:24 crc kubenswrapper[4809]: I0930 00:12:24.689610 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mhgdc" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="registry-server" containerID="cri-o://f31fe6fdcc73adbff8a4c7d397a6d916085c05fdc735dd33bcc1d058de1b97a4" gracePeriod=2 Sep 30 00:12:24 crc kubenswrapper[4809]: I0930 00:12:24.690349 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hjqb8" event={"ID":"00358083-724a-47cb-b376-1cc3d97a9fab","Type":"ContainerStarted","Data":"65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32"} Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.325680 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.325788 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.357988 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r48zg"] Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.358398 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r48zg" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="registry-server" containerID="cri-o://a6cfb355f82f3095122e0e3b42a435cbdb2856586f704feeeff43b21cd94ae7a" gracePeriod=2 Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.699736 4809 generic.go:334] "Generic (PLEG): container finished" podID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerID="f31fe6fdcc73adbff8a4c7d397a6d916085c05fdc735dd33bcc1d058de1b97a4" exitCode=0 Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.699845 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-mhgdc" event={"ID":"799d1bcb-02dd-4597-a154-3d3ce7b877db","Type":"ContainerDied","Data":"f31fe6fdcc73adbff8a4c7d397a6d916085c05fdc735dd33bcc1d058de1b97a4"} Sep 30 00:12:25 crc kubenswrapper[4809]: I0930 00:12:25.724916 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hjqb8" podStartSLOduration=3.577138412 podStartE2EDuration="47.724894965s" podCreationTimestamp="2025-09-30 00:11:38 +0000 UTC" firstStartedPulling="2025-09-30 00:11:40.190108189 +0000 UTC m=+151.226357607" lastFinishedPulling="2025-09-30 00:12:24.337864752 +0000 UTC m=+195.374114160" observedRunningTime="2025-09-30 00:12:25.722119491 +0000 UTC m=+196.758368899" watchObservedRunningTime="2025-09-30 00:12:25.724894965 +0000 UTC m=+196.761144373" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.038003 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.145966 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-catalog-content\") pod \"799d1bcb-02dd-4597-a154-3d3ce7b877db\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.146060 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-utilities\") pod \"799d1bcb-02dd-4597-a154-3d3ce7b877db\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.146108 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrmh2\" (UniqueName: \"kubernetes.io/projected/799d1bcb-02dd-4597-a154-3d3ce7b877db-kube-api-access-vrmh2\") pod \"799d1bcb-02dd-4597-a154-3d3ce7b877db\" (UID: \"799d1bcb-02dd-4597-a154-3d3ce7b877db\") " Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.147058 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-utilities" (OuterVolumeSpecName: "utilities") pod "799d1bcb-02dd-4597-a154-3d3ce7b877db" (UID: "799d1bcb-02dd-4597-a154-3d3ce7b877db"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.157879 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "799d1bcb-02dd-4597-a154-3d3ce7b877db" (UID: "799d1bcb-02dd-4597-a154-3d3ce7b877db"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.165344 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/799d1bcb-02dd-4597-a154-3d3ce7b877db-kube-api-access-vrmh2" (OuterVolumeSpecName: "kube-api-access-vrmh2") pod "799d1bcb-02dd-4597-a154-3d3ce7b877db" (UID: "799d1bcb-02dd-4597-a154-3d3ce7b877db"). InnerVolumeSpecName "kube-api-access-vrmh2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.247636 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.247712 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/799d1bcb-02dd-4597-a154-3d3ce7b877db-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.247726 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrmh2\" (UniqueName: \"kubernetes.io/projected/799d1bcb-02dd-4597-a154-3d3ce7b877db-kube-api-access-vrmh2\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.717402 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhgdc" event={"ID":"799d1bcb-02dd-4597-a154-3d3ce7b877db","Type":"ContainerDied","Data":"bb392c35b9fe79a27120f08d4e07a92cc77bd83485a726096e37a9bfc7d8ca62"} Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.717466 4809 scope.go:117] "RemoveContainer" containerID="f31fe6fdcc73adbff8a4c7d397a6d916085c05fdc735dd33bcc1d058de1b97a4" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.717623 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhgdc" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.723465 4809 generic.go:334] "Generic (PLEG): container finished" podID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerID="a6cfb355f82f3095122e0e3b42a435cbdb2856586f704feeeff43b21cd94ae7a" exitCode=0 Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.723779 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerDied","Data":"a6cfb355f82f3095122e0e3b42a435cbdb2856586f704feeeff43b21cd94ae7a"} Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.744730 4809 scope.go:117] "RemoveContainer" containerID="6b1f461660a9ac327d1c45fa25c7d4155428312146cdefe87377d3578898267e" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.772609 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhgdc"] Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.784484 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhgdc"] Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.787196 4809 scope.go:117] "RemoveContainer" containerID="3d3520904d40e4be1c67765b1675fc02569bb73d570f0c600e93df73862c9a5a" Sep 30 00:12:26 crc kubenswrapper[4809]: I0930 00:12:26.904312 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.068863 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-utilities\") pod \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.068957 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-catalog-content\") pod \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.069010 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwqrg\" (UniqueName: \"kubernetes.io/projected/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-kube-api-access-xwqrg\") pod \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\" (UID: \"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774\") " Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.071345 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-utilities" (OuterVolumeSpecName: "utilities") pod "558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" (UID: "558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.079979 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-kube-api-access-xwqrg" (OuterVolumeSpecName: "kube-api-access-xwqrg") pod "558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" (UID: "558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774"). InnerVolumeSpecName "kube-api-access-xwqrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.164984 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" (UID: "558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.170790 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.170824 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.170839 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwqrg\" (UniqueName: \"kubernetes.io/projected/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774-kube-api-access-xwqrg\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.703567 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" path="/var/lib/kubelet/pods/799d1bcb-02dd-4597-a154-3d3ce7b877db/volumes" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.734102 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r48zg" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.734821 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r48zg" event={"ID":"558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774","Type":"ContainerDied","Data":"4ae7c2f4ccc900ec397bfc470f8b6eb601e054cac6dfceaf2be7850ab162e1b5"} Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.734908 4809 scope.go:117] "RemoveContainer" containerID="a6cfb355f82f3095122e0e3b42a435cbdb2856586f704feeeff43b21cd94ae7a" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.737920 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerStarted","Data":"26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9"} Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.774057 4809 scope.go:117] "RemoveContainer" containerID="f160d24551e21c61a32643f69ef37926ffe2094f3ee61b4a8affa9ddda8c421d" Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.787806 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r48zg"] Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.791394 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r48zg"] Sep 30 00:12:27 crc kubenswrapper[4809]: I0930 00:12:27.798118 4809 scope.go:117] "RemoveContainer" containerID="10d8032fee5752edb87df74b2b1e49239d1e05156707b5a6abc2490a5ac648c0" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.630886 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.631226 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.682582 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.724222 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.724279 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.751817 4809 generic.go:334] "Generic (PLEG): container finished" podID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerID="26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9" exitCode=0 Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.751905 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerDied","Data":"26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9"} Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.776360 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:12:28 crc kubenswrapper[4809]: I0930 00:12:28.822923 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:12:29 crc kubenswrapper[4809]: I0930 00:12:29.697917 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" path="/var/lib/kubelet/pods/558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774/volumes" Sep 30 00:12:29 crc kubenswrapper[4809]: I0930 00:12:29.767504 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerStarted","Data":"1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934"} Sep 30 00:12:29 crc kubenswrapper[4809]: I0930 00:12:29.786950 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7sd88" podStartSLOduration=2.715671294 podStartE2EDuration="51.786935484s" podCreationTimestamp="2025-09-30 00:11:38 +0000 UTC" firstStartedPulling="2025-09-30 00:11:40.184046605 +0000 UTC m=+151.220296023" lastFinishedPulling="2025-09-30 00:12:29.255310805 +0000 UTC m=+200.291560213" observedRunningTime="2025-09-30 00:12:29.784155272 +0000 UTC m=+200.820404670" watchObservedRunningTime="2025-09-30 00:12:29.786935484 +0000 UTC m=+200.823184892" Sep 30 00:12:30 crc kubenswrapper[4809]: I0930 00:12:30.757277 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:12:30 crc kubenswrapper[4809]: I0930 00:12:30.758104 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:12:30 crc kubenswrapper[4809]: I0930 00:12:30.837046 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:12:31 crc kubenswrapper[4809]: I0930 00:12:31.820988 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:12:38 crc kubenswrapper[4809]: I0930 00:12:38.762968 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:12:39 crc kubenswrapper[4809]: I0930 00:12:39.111324 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7sd88" 
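The run of "SyncLoop (probe)" entries above records each catalog pod's startup probe flipping from "unhealthy" to "started" and its readiness probe settling at "ready", and the pod_startup_latency_tracker entry reports the resulting podStartSLOduration and podStartE2EDuration. A per-pod timeline of those transitions can be pulled straight out of a dump like this one; the following is a minimal sketch, assuming Python 3 is available, that the journal is saved with one entry per line (as journalctl emits it), and that the file is named kubelet.log. The script and file name are illustrative, not part of this capture.

#!/usr/bin/env python3
"""Trace per-pod probe transitions from a kubelet journal dump (illustrative sketch)."""
import re
import sys
from collections import defaultdict

# Matches entries of the form seen above, e.g.:
#   Sep 30 00:12:39 crc kubenswrapper[4809]: I0930 00:12:39.111324 4809 kubelet.go:2542]
#   "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7sd88"
PROBE_RE = re.compile(
    r'^(?P<ts>\w{3} \d+ [\d:]+) .*"SyncLoop \(probe\)" '
    r'probe="(?P<probe>[^"]*)" status="(?P<status>[^"]*)" '
    r'pod="(?P<pod>[^"]*)"'
)

def trace(path: str) -> None:
    timeline = defaultdict(list)          # pod -> [(timestamp, probe, status), ...]
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            m = PROBE_RE.match(line)
            if m:
                timeline[m["pod"]].append((m["ts"], m["probe"], m["status"]))
    for pod, events in sorted(timeline.items()):
        print(pod)
        for ts, probe, status in events:
            # An empty status (status="") is what the kubelet logs before a result is known.
            print(f"  {ts}  {probe:<9} {status or '<empty>'}")

if __name__ == "__main__":
    trace(sys.argv[1] if len(sys.argv) > 1 else "kubelet.log")

Invoked with the journal path as its only argument, it prints one block per pod, for example the certified-operators-7sd88 startup/readiness transitions around 00:12:39 above.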
Sep 30 00:12:39 crc kubenswrapper[4809]: I0930 00:12:39.111748 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:12:39 crc kubenswrapper[4809]: I0930 00:12:39.186000 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:12:39 crc kubenswrapper[4809]: I0930 00:12:39.869281 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:12:40 crc kubenswrapper[4809]: I0930 00:12:40.353927 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7sd88"] Sep 30 00:12:41 crc kubenswrapper[4809]: I0930 00:12:41.829538 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7sd88" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="registry-server" containerID="cri-o://1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934" gracePeriod=2 Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.229733 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.388412 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf7x7\" (UniqueName: \"kubernetes.io/projected/7b54e11f-446c-47f6-9a5c-5a15853df320-kube-api-access-hf7x7\") pod \"7b54e11f-446c-47f6-9a5c-5a15853df320\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.388510 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-utilities\") pod \"7b54e11f-446c-47f6-9a5c-5a15853df320\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.388570 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-catalog-content\") pod \"7b54e11f-446c-47f6-9a5c-5a15853df320\" (UID: \"7b54e11f-446c-47f6-9a5c-5a15853df320\") " Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.389675 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-utilities" (OuterVolumeSpecName: "utilities") pod "7b54e11f-446c-47f6-9a5c-5a15853df320" (UID: "7b54e11f-446c-47f6-9a5c-5a15853df320"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.394941 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b54e11f-446c-47f6-9a5c-5a15853df320-kube-api-access-hf7x7" (OuterVolumeSpecName: "kube-api-access-hf7x7") pod "7b54e11f-446c-47f6-9a5c-5a15853df320" (UID: "7b54e11f-446c-47f6-9a5c-5a15853df320"). InnerVolumeSpecName "kube-api-access-hf7x7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.438361 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b54e11f-446c-47f6-9a5c-5a15853df320" (UID: "7b54e11f-446c-47f6-9a5c-5a15853df320"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.490056 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf7x7\" (UniqueName: \"kubernetes.io/projected/7b54e11f-446c-47f6-9a5c-5a15853df320-kube-api-access-hf7x7\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.490096 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.490107 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b54e11f-446c-47f6-9a5c-5a15853df320-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.842779 4809 generic.go:334] "Generic (PLEG): container finished" podID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerID="1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934" exitCode=0 Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.842830 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerDied","Data":"1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934"} Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.842859 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sd88" event={"ID":"7b54e11f-446c-47f6-9a5c-5a15853df320","Type":"ContainerDied","Data":"fa7a9f1394d6dcabf049111d0c97cbcf60b99a527ceaee01a9b596f73c183c37"} Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.842876 4809 scope.go:117] "RemoveContainer" containerID="1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.842878 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7sd88" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.864713 4809 scope.go:117] "RemoveContainer" containerID="26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.884994 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7sd88"] Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.887294 4809 scope.go:117] "RemoveContainer" containerID="570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.887322 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7sd88"] Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.902891 4809 scope.go:117] "RemoveContainer" containerID="1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934" Sep 30 00:12:42 crc kubenswrapper[4809]: E0930 00:12:42.903429 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934\": container with ID starting with 1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934 not found: ID does not exist" containerID="1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.903489 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934"} err="failed to get container status \"1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934\": rpc error: code = NotFound desc = could not find container \"1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934\": container with ID starting with 1adb465fc010aceb38c481c0b0a5428a052f8c81aee8b8c66ba7ab84fb0c7934 not found: ID does not exist" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.903533 4809 scope.go:117] "RemoveContainer" containerID="26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9" Sep 30 00:12:42 crc kubenswrapper[4809]: E0930 00:12:42.903997 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9\": container with ID starting with 26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9 not found: ID does not exist" containerID="26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.904030 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9"} err="failed to get container status \"26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9\": rpc error: code = NotFound desc = could not find container \"26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9\": container with ID starting with 26553c705c30e24ef4223ebe2ee0497320d5751cbb30b0dc649b30cc45c96fd9 not found: ID does not exist" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.904054 4809 scope.go:117] "RemoveContainer" containerID="570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052" Sep 30 00:12:42 crc kubenswrapper[4809]: E0930 00:12:42.904478 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052\": container with ID starting with 570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052 not found: ID does not exist" containerID="570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052" Sep 30 00:12:42 crc kubenswrapper[4809]: I0930 00:12:42.904511 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052"} err="failed to get container status \"570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052\": rpc error: code = NotFound desc = could not find container \"570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052\": container with ID starting with 570a547c4439b8a8258ab742da1d9c1f56ed3e8b6a349b4b031e6322b4c02052 not found: ID does not exist" Sep 30 00:12:43 crc kubenswrapper[4809]: I0930 00:12:43.696855 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" path="/var/lib/kubelet/pods/7b54e11f-446c-47f6-9a5c-5a15853df320/volumes" Sep 30 00:12:50 crc kubenswrapper[4809]: I0930 00:12:50.614832 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-d9lrc"] Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.324798 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.325080 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.325122 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.325578 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.325664 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408" gracePeriod=600 Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.913489 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408" exitCode=0 Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.913608 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408"} Sep 30 00:12:55 crc kubenswrapper[4809]: I0930 00:12:55.914293 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"29022887a4c6fb6f640d22610d08caf7b703bbcb52c661cad5e3c432c0c8a806"} Sep 30 00:13:15 crc kubenswrapper[4809]: I0930 00:13:15.645894 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" podUID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" containerName="oauth-openshift" containerID="cri-o://d04a597c30950bcb52d6643516a5b537f3f62afd9ac99ffb4a99c68064bc2e37" gracePeriod=15 Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.046610 4809 generic.go:334] "Generic (PLEG): container finished" podID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" containerID="d04a597c30950bcb52d6643516a5b537f3f62afd9ac99ffb4a99c68064bc2e37" exitCode=0 Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.046723 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" event={"ID":"0cb3f89e-4f46-45ca-be4e-948f5010dcc1","Type":"ContainerDied","Data":"d04a597c30950bcb52d6643516a5b537f3f62afd9ac99ffb4a99c68064bc2e37"} Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.130725 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200014 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7f54ff7574-6w4h5"] Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200392 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200417 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200437 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200451 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200464 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200476 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200498 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200509 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 
00:13:16.200527 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa609401-afb6-40f1-adf0-a1a314f101fb" containerName="pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200540 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa609401-afb6-40f1-adf0-a1a314f101fb" containerName="pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200555 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200567 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200585 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200597 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200612 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200624 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200664 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59c82a04-264a-4f2f-b830-b8559fd743d6" containerName="pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200677 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="59c82a04-264a-4f2f-b830-b8559fd743d6" containerName="pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200694 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200706 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200720 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200732 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200753 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200764 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="extract-content" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200783 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" containerName="oauth-openshift" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200796 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" containerName="oauth-openshift" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 
00:13:16.200814 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200825 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="extract-utilities" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200840 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200851 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: E0930 00:13:16.200866 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62227ec8-217f-461a-8116-079b9466a726" containerName="image-pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.200877 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="62227ec8-217f-461a-8116-079b9466a726" containerName="image-pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201062 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="59c82a04-264a-4f2f-b830-b8559fd743d6" containerName="pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201079 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="558bc1b5-a1d1-4bfe-8fc0-bdc828ca4774" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201093 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" containerName="oauth-openshift" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201107 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa609401-afb6-40f1-adf0-a1a314f101fb" containerName="pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201121 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="62227ec8-217f-461a-8116-079b9466a726" containerName="image-pruner" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201137 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="799d1bcb-02dd-4597-a154-3d3ce7b877db" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201151 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="852e34d6-9b62-4f31-b9b2-78caac40540d" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.201166 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b54e11f-446c-47f6-9a5c-5a15853df320" containerName="registry-server" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.202677 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.209873 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7f54ff7574-6w4h5"] Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296770 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-provider-selection\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296833 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-dir\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296864 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh672\" (UniqueName: \"kubernetes.io/projected/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-kube-api-access-gh672\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296884 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-trusted-ca-bundle\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296906 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-login\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296941 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-ocp-branding-template\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296966 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-service-ca\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.296992 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-router-certs\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297009 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-serving-cert\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297025 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-error\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297047 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-idp-0-file-data\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297065 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-session\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297094 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-policies\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297110 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-cliconfig\") pod \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\" (UID: \"0cb3f89e-4f46-45ca-be4e-948f5010dcc1\") " Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.297922 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.298282 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.298303 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.298385 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.298385 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.303106 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.306863 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.306914 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.307424 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.307864 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.307996 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-kube-api-access-gh672" (OuterVolumeSpecName: "kube-api-access-gh672") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "kube-api-access-gh672". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.311135 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.311267 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.311397 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "0cb3f89e-4f46-45ca-be4e-948f5010dcc1" (UID: "0cb3f89e-4f46-45ca-be4e-948f5010dcc1"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398448 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398498 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-service-ca\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398519 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/45709509-ca70-4ad8-8c93-a210eb4fd618-audit-dir\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398542 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-audit-policies\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398564 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398581 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398665 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-router-certs\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398696 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-error\") pod 
\"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.398844 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399025 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvxsx\" (UniqueName: \"kubernetes.io/projected/45709509-ca70-4ad8-8c93-a210eb4fd618-kube-api-access-wvxsx\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399149 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399256 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-login\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399289 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-session\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399321 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399847 4809 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399872 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 
00:13:16.399904 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399918 4809 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399928 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh672\" (UniqueName: \"kubernetes.io/projected/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-kube-api-access-gh672\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399976 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.399988 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400000 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400011 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400020 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400030 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400039 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400049 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.400058 4809 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0cb3f89e-4f46-45ca-be4e-948f5010dcc1-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:16 crc 
kubenswrapper[4809]: I0930 00:13:16.502029 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502125 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvxsx\" (UniqueName: \"kubernetes.io/projected/45709509-ca70-4ad8-8c93-a210eb4fd618-kube-api-access-wvxsx\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502169 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502211 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-session\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502247 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-login\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502303 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502352 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.502391 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-service-ca\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" 
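The entries above capture the two halves of the oauth-openshift rollover: the old pod's secrets, configmaps and projected token pass through "operationExecutor.UnmountVolume started", "UnmountVolume.TearDown succeeded" and "Volume detached", while the replacement pod's volumes are verified as attached and then mounted ("operationExecutor.MountVolume started", "MountVolume.SetUp succeeded"). To follow that lifecycle for a single pod UID in a dump like this, here is a minimal sketch along the same lines as the probe tracer, with the same assumptions about Python 3 and the kubelet.log file name; the default UID below is the replacement pod's UID taken from the entries above.

#!/usr/bin/env python3
"""List volume lifecycle events for one pod UID from a kubelet journal dump (illustrative sketch)."""
import re
import sys

# Phase strings mirror the reconciler_common and operation_generator messages in this log.
PHASES = (
    "operationExecutor.UnmountVolume started",
    "UnmountVolume.TearDown succeeded",
    "Volume detached",
    "operationExecutor.VerifyControllerAttachedVolume started",
    "operationExecutor.MountVolume started",
    "MountVolume.SetUp succeeded",
)
TS_RE = re.compile(r"^(\w{3} \d+ [\d:]+)")

def volume_events(path: str, uid: str):
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            if uid not in line:
                continue
            for phase in PHASES:
                if phase in line:
                    ts = TS_RE.match(line)
                    yield (ts.group(1) if ts else "?", phase)
                    break

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "kubelet.log"
    uid = sys.argv[2] if len(sys.argv) > 2 else "45709509-ca70-4ad8-8c93-a210eb4fd618"
    for ts, phase in volume_events(path, uid):
        print(f"{ts}  {phase}")

Run against this journal with the old pod's UID (0cb3f89e-4f46-45ca-be4e-948f5010dcc1) it lists the unmount and detach sequence; with the default UID it lists the attach, mount and SetUp sequence that follows below.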
Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.503090 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/45709509-ca70-4ad8-8c93-a210eb4fd618-audit-dir\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.503169 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-audit-policies\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.503240 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.503296 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.503357 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-router-certs\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.503429 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-error\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.504369 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-service-ca\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.504560 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/45709509-ca70-4ad8-8c93-a210eb4fd618-audit-dir\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.505745 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.506739 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.507085 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/45709509-ca70-4ad8-8c93-a210eb4fd618-audit-policies\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.507704 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-login\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.508137 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.508553 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-session\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.509488 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-router-certs\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.510074 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.510164 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-error\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.511124 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.512354 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/45709509-ca70-4ad8-8c93-a210eb4fd618-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.533079 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvxsx\" (UniqueName: \"kubernetes.io/projected/45709509-ca70-4ad8-8c93-a210eb4fd618-kube-api-access-wvxsx\") pod \"oauth-openshift-7f54ff7574-6w4h5\" (UID: \"45709509-ca70-4ad8-8c93-a210eb4fd618\") " pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:16 crc kubenswrapper[4809]: I0930 00:13:16.832031 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.057126 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" event={"ID":"0cb3f89e-4f46-45ca-be4e-948f5010dcc1","Type":"ContainerDied","Data":"aa0d04cafc72161113ca3765643c8d3260fba99e7cc45ca4c0ed3866f475a9aa"} Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.057475 4809 scope.go:117] "RemoveContainer" containerID="d04a597c30950bcb52d6643516a5b537f3f62afd9ac99ffb4a99c68064bc2e37" Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.057189 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-d9lrc" Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.099810 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-d9lrc"] Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.104211 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-d9lrc"] Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.165557 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7f54ff7574-6w4h5"] Sep 30 00:13:17 crc kubenswrapper[4809]: I0930 00:13:17.704058 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cb3f89e-4f46-45ca-be4e-948f5010dcc1" path="/var/lib/kubelet/pods/0cb3f89e-4f46-45ca-be4e-948f5010dcc1/volumes" Sep 30 00:13:18 crc kubenswrapper[4809]: I0930 00:13:18.069051 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" event={"ID":"45709509-ca70-4ad8-8c93-a210eb4fd618","Type":"ContainerStarted","Data":"f21a4557dcfe7f9ed91ec39ee9e800d0d53ee771d4c34f2d12b79d467c3dc6a5"} Sep 30 00:13:18 crc kubenswrapper[4809]: I0930 00:13:18.069142 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" event={"ID":"45709509-ca70-4ad8-8c93-a210eb4fd618","Type":"ContainerStarted","Data":"6ae6c690086357c980a07527d22e9dcc69c7dcfde9ca00c6174efd2bede4acb6"} Sep 30 00:13:18 crc kubenswrapper[4809]: I0930 00:13:18.069479 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:18 crc kubenswrapper[4809]: I0930 00:13:18.080388 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" Sep 30 00:13:18 crc kubenswrapper[4809]: I0930 00:13:18.107324 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7f54ff7574-6w4h5" podStartSLOduration=28.107296062 podStartE2EDuration="28.107296062s" podCreationTimestamp="2025-09-30 00:12:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:13:18.103190436 +0000 UTC m=+249.139439874" watchObservedRunningTime="2025-09-30 00:13:18.107296062 +0000 UTC m=+249.143545500" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.477427 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hjqb8"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.478517 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hjqb8" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="registry-server" containerID="cri-o://65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" gracePeriod=30 Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.485519 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j2ssl"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.486360 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j2ssl" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="registry-server" 
containerID="cri-o://7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" gracePeriod=30 Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.506581 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-clhm9"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.506966 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" containerName="marketplace-operator" containerID="cri-o://7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8" gracePeriod=30 Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.516794 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l74tx"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.517132 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l74tx" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="registry-server" containerID="cri-o://ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853" gracePeriod=30 Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.533448 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8w9pr"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.533999 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8w9pr" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="registry-server" containerID="cri-o://3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb" gracePeriod=30 Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.539221 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tcqkz"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.540049 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.543159 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tcqkz"] Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.619813 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4983b18e-5198-4c30-874a-50c718310352-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.620260 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mqfl\" (UniqueName: \"kubernetes.io/projected/4983b18e-5198-4c30-874a-50c718310352-kube-api-access-5mqfl\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.620342 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4983b18e-5198-4c30-874a-50c718310352-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.632332 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096 is running failed: container process not found" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" cmd=["grpc_health_probe","-addr=:50051"] Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.633338 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096 is running failed: container process not found" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" cmd=["grpc_health_probe","-addr=:50051"] Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.634046 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096 is running failed: container process not found" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" cmd=["grpc_health_probe","-addr=:50051"] Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.634172 4809 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-j2ssl" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="registry-server" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.721844 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4983b18e-5198-4c30-874a-50c718310352-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.721920 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4983b18e-5198-4c30-874a-50c718310352-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.721947 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mqfl\" (UniqueName: \"kubernetes.io/projected/4983b18e-5198-4c30-874a-50c718310352-kube-api-access-5mqfl\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.724001 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4983b18e-5198-4c30-874a-50c718310352-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.725429 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32 is running failed: container process not found" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" cmd=["grpc_health_probe","-addr=:50051"] Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.725880 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32 is running failed: container process not found" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" cmd=["grpc_health_probe","-addr=:50051"] Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.728776 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32 is running failed: container process not found" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" cmd=["grpc_health_probe","-addr=:50051"] Sep 30 00:13:28 crc kubenswrapper[4809]: E0930 00:13:28.728822 4809 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hjqb8" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="registry-server" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.730529 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4983b18e-5198-4c30-874a-50c718310352-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.740580 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mqfl\" (UniqueName: \"kubernetes.io/projected/4983b18e-5198-4c30-874a-50c718310352-kube-api-access-5mqfl\") pod \"marketplace-operator-79b997595-tcqkz\" (UID: \"4983b18e-5198-4c30-874a-50c718310352\") " pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.912700 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.924407 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.948995 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.973928 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:13:28 crc kubenswrapper[4809]: I0930 00:13:28.980631 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.027866 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhw6l\" (UniqueName: \"kubernetes.io/projected/9c28e622-d89b-4819-b346-5cc07af83b8b-kube-api-access-hhw6l\") pod \"9c28e622-d89b-4819-b346-5cc07af83b8b\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.028096 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-utilities\") pod \"00358083-724a-47cb-b376-1cc3d97a9fab\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.028174 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-catalog-content\") pod \"00358083-724a-47cb-b376-1cc3d97a9fab\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.028270 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-utilities\") pod \"9c28e622-d89b-4819-b346-5cc07af83b8b\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.028373 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-catalog-content\") pod \"9c28e622-d89b-4819-b346-5cc07af83b8b\" (UID: \"9c28e622-d89b-4819-b346-5cc07af83b8b\") " Sep 30 00:13:29 
crc kubenswrapper[4809]: I0930 00:13:29.028484 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmvk6\" (UniqueName: \"kubernetes.io/projected/00358083-724a-47cb-b376-1cc3d97a9fab-kube-api-access-nmvk6\") pod \"00358083-724a-47cb-b376-1cc3d97a9fab\" (UID: \"00358083-724a-47cb-b376-1cc3d97a9fab\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.029596 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-utilities" (OuterVolumeSpecName: "utilities") pod "00358083-724a-47cb-b376-1cc3d97a9fab" (UID: "00358083-724a-47cb-b376-1cc3d97a9fab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.030005 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.030356 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-utilities" (OuterVolumeSpecName: "utilities") pod "9c28e622-d89b-4819-b346-5cc07af83b8b" (UID: "9c28e622-d89b-4819-b346-5cc07af83b8b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.030924 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c28e622-d89b-4819-b346-5cc07af83b8b-kube-api-access-hhw6l" (OuterVolumeSpecName: "kube-api-access-hhw6l") pod "9c28e622-d89b-4819-b346-5cc07af83b8b" (UID: "9c28e622-d89b-4819-b346-5cc07af83b8b"). InnerVolumeSpecName "kube-api-access-hhw6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.047402 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00358083-724a-47cb-b376-1cc3d97a9fab-kube-api-access-nmvk6" (OuterVolumeSpecName: "kube-api-access-nmvk6") pod "00358083-724a-47cb-b376-1cc3d97a9fab" (UID: "00358083-724a-47cb-b376-1cc3d97a9fab"). InnerVolumeSpecName "kube-api-access-nmvk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.096069 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c28e622-d89b-4819-b346-5cc07af83b8b" (UID: "9c28e622-d89b-4819-b346-5cc07af83b8b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.099618 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00358083-724a-47cb-b376-1cc3d97a9fab" (UID: "00358083-724a-47cb-b376-1cc3d97a9fab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129479 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-catalog-content\") pod \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129509 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca\") pod \"edff5a66-7ade-44f3-a770-62a25d56e674\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129530 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-utilities\") pod \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129565 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-catalog-content\") pod \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129584 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb576\" (UniqueName: \"kubernetes.io/projected/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-kube-api-access-hb576\") pod \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\" (UID: \"31398a45-e7dc-4b71-81ac-0fc5c16b9c26\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129602 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-utilities\") pod \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129680 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbgvx\" (UniqueName: \"kubernetes.io/projected/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-kube-api-access-cbgvx\") pod \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\" (UID: \"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129727 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics\") pod \"edff5a66-7ade-44f3-a770-62a25d56e674\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.129751 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpmrk\" (UniqueName: \"kubernetes.io/projected/edff5a66-7ade-44f3-a770-62a25d56e674-kube-api-access-wpmrk\") pod \"edff5a66-7ade-44f3-a770-62a25d56e674\" (UID: \"edff5a66-7ade-44f3-a770-62a25d56e674\") " Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.130051 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhw6l\" (UniqueName: 
\"kubernetes.io/projected/9c28e622-d89b-4819-b346-5cc07af83b8b-kube-api-access-hhw6l\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.130137 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.130150 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00358083-724a-47cb-b376-1cc3d97a9fab-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.130161 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.130169 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c28e622-d89b-4819-b346-5cc07af83b8b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.130177 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmvk6\" (UniqueName: \"kubernetes.io/projected/00358083-724a-47cb-b376-1cc3d97a9fab-kube-api-access-nmvk6\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.131572 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-utilities" (OuterVolumeSpecName: "utilities") pod "a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" (UID: "a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.131694 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-utilities" (OuterVolumeSpecName: "utilities") pod "31398a45-e7dc-4b71-81ac-0fc5c16b9c26" (UID: "31398a45-e7dc-4b71-81ac-0fc5c16b9c26"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.132062 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "edff5a66-7ade-44f3-a770-62a25d56e674" (UID: "edff5a66-7ade-44f3-a770-62a25d56e674"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.133124 4809 generic.go:334] "Generic (PLEG): container finished" podID="00358083-724a-47cb-b376-1cc3d97a9fab" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" exitCode=0 Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.133211 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hjqb8" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.133839 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hjqb8" event={"ID":"00358083-724a-47cb-b376-1cc3d97a9fab","Type":"ContainerDied","Data":"65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.134290 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hjqb8" event={"ID":"00358083-724a-47cb-b376-1cc3d97a9fab","Type":"ContainerDied","Data":"7169470348fee3b9451cd697565912d96b15faae1ae205536f427001c6498704"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.134363 4809 scope.go:117] "RemoveContainer" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.135152 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-kube-api-access-hb576" (OuterVolumeSpecName: "kube-api-access-hb576") pod "31398a45-e7dc-4b71-81ac-0fc5c16b9c26" (UID: "31398a45-e7dc-4b71-81ac-0fc5c16b9c26"). InnerVolumeSpecName "kube-api-access-hb576". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.135532 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "edff5a66-7ade-44f3-a770-62a25d56e674" (UID: "edff5a66-7ade-44f3-a770-62a25d56e674"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.139978 4809 generic.go:334] "Generic (PLEG): container finished" podID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerID="ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853" exitCode=0 Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.140057 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerDied","Data":"ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.140086 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l74tx" event={"ID":"31398a45-e7dc-4b71-81ac-0fc5c16b9c26","Type":"ContainerDied","Data":"072b5de555a53a1641e40b0fbf3bb5c38e268082de986ff06e74a47c943cc429"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.140146 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l74tx" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.142939 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edff5a66-7ade-44f3-a770-62a25d56e674-kube-api-access-wpmrk" (OuterVolumeSpecName: "kube-api-access-wpmrk") pod "edff5a66-7ade-44f3-a770-62a25d56e674" (UID: "edff5a66-7ade-44f3-a770-62a25d56e674"). InnerVolumeSpecName "kube-api-access-wpmrk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.144062 4809 generic.go:334] "Generic (PLEG): container finished" podID="edff5a66-7ade-44f3-a770-62a25d56e674" containerID="7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8" exitCode=0 Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.144108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" event={"ID":"edff5a66-7ade-44f3-a770-62a25d56e674","Type":"ContainerDied","Data":"7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.144129 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" event={"ID":"edff5a66-7ade-44f3-a770-62a25d56e674","Type":"ContainerDied","Data":"aa412240d9904248be6cb342e7aafbe041cd5613ada4c31664e5a8f89d1bef81"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.144165 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-clhm9" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.146211 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31398a45-e7dc-4b71-81ac-0fc5c16b9c26" (UID: "31398a45-e7dc-4b71-81ac-0fc5c16b9c26"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.147906 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" exitCode=0 Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.147978 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2ssl" event={"ID":"9c28e622-d89b-4819-b346-5cc07af83b8b","Type":"ContainerDied","Data":"7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.148003 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2ssl" event={"ID":"9c28e622-d89b-4819-b346-5cc07af83b8b","Type":"ContainerDied","Data":"8083ab2b46eebd1a24130e29c9f0760ae16a7c0c51f264d9dffe59683520393c"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.148085 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j2ssl" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.154916 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerID="3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb" exitCode=0 Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.154950 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8w9pr" event={"ID":"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2","Type":"ContainerDied","Data":"3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.154975 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8w9pr" event={"ID":"a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2","Type":"ContainerDied","Data":"786100e9867dcf415b524ae36826841a74a3c4bb9f11fb48cd64050d22827bd2"} Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.155033 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8w9pr" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.158307 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-kube-api-access-cbgvx" (OuterVolumeSpecName: "kube-api-access-cbgvx") pod "a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" (UID: "a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2"). InnerVolumeSpecName "kube-api-access-cbgvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.158377 4809 scope.go:117] "RemoveContainer" containerID="bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.179220 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hjqb8"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.181999 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hjqb8"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.189908 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-clhm9"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.194365 4809 scope.go:117] "RemoveContainer" containerID="6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.194729 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-clhm9"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.204742 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j2ssl"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.207166 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j2ssl"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.210524 4809 scope.go:117] "RemoveContainer" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.211353 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32\": container with ID starting with 
65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32 not found: ID does not exist" containerID="65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.211415 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32"} err="failed to get container status \"65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32\": rpc error: code = NotFound desc = could not find container \"65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32\": container with ID starting with 65b6cfa940b9e46dfd89945c770df52aff88b265c2941de6c0761c7716819e32 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.211461 4809 scope.go:117] "RemoveContainer" containerID="bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.212333 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015\": container with ID starting with bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015 not found: ID does not exist" containerID="bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.212382 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015"} err="failed to get container status \"bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015\": rpc error: code = NotFound desc = could not find container \"bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015\": container with ID starting with bbb8d99a399cf4efd9c91c67831b7f5120b8fe1116b71b3e87aea1aacf7e1015 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.212411 4809 scope.go:117] "RemoveContainer" containerID="6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.212905 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960\": container with ID starting with 6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960 not found: ID does not exist" containerID="6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.212943 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960"} err="failed to get container status \"6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960\": rpc error: code = NotFound desc = could not find container \"6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960\": container with ID starting with 6b55ced28742dec7e21edc8906e00c76699bc7a6ccec6428dcda8a8367e14960 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.212970 4809 scope.go:117] "RemoveContainer" containerID="ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.225936 4809 scope.go:117] "RemoveContainer" 
containerID="632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231386 4809 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231414 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpmrk\" (UniqueName: \"kubernetes.io/projected/edff5a66-7ade-44f3-a770-62a25d56e674-kube-api-access-wpmrk\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231427 4809 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edff5a66-7ade-44f3-a770-62a25d56e674-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231438 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231450 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231463 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb576\" (UniqueName: \"kubernetes.io/projected/31398a45-e7dc-4b71-81ac-0fc5c16b9c26-kube-api-access-hb576\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231474 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.231486 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbgvx\" (UniqueName: \"kubernetes.io/projected/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-kube-api-access-cbgvx\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.240716 4809 scope.go:117] "RemoveContainer" containerID="54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.253061 4809 scope.go:117] "RemoveContainer" containerID="ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.253681 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853\": container with ID starting with ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853 not found: ID does not exist" containerID="ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.253727 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853"} err="failed to get container status \"ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853\": rpc error: code = NotFound desc = could not find container 
\"ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853\": container with ID starting with ad435bd229a89a247a262af19b3e46f8f29ba298f78e3929149fc62d93962853 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.253760 4809 scope.go:117] "RemoveContainer" containerID="632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.254831 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b\": container with ID starting with 632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b not found: ID does not exist" containerID="632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.254886 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b"} err="failed to get container status \"632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b\": rpc error: code = NotFound desc = could not find container \"632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b\": container with ID starting with 632d98fd1c6f44e71cf36faccdf3825b0b11e36407c4c1892e71e74d32c8438b not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.254926 4809 scope.go:117] "RemoveContainer" containerID="54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.255879 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565\": container with ID starting with 54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565 not found: ID does not exist" containerID="54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.255909 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565"} err="failed to get container status \"54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565\": rpc error: code = NotFound desc = could not find container \"54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565\": container with ID starting with 54dec868cc1480cdea0fb9400b612e1c93c150c88b0dc8537f67d30efb13b565 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.255924 4809 scope.go:117] "RemoveContainer" containerID="7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.268348 4809 scope.go:117] "RemoveContainer" containerID="7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.268893 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8\": container with ID starting with 7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8 not found: ID does not exist" containerID="7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 
00:13:29.268941 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8"} err="failed to get container status \"7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8\": rpc error: code = NotFound desc = could not find container \"7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8\": container with ID starting with 7ffff9cf5803dc0ec65cbbdf73a62bfa358aad7d176b13c8bcf397e14e734ff8 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.268972 4809 scope.go:117] "RemoveContainer" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.270694 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" (UID: "a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.288186 4809 scope.go:117] "RemoveContainer" containerID="39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.303794 4809 scope.go:117] "RemoveContainer" containerID="d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.323616 4809 scope.go:117] "RemoveContainer" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.324105 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096\": container with ID starting with 7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096 not found: ID does not exist" containerID="7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.324196 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096"} err="failed to get container status \"7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096\": rpc error: code = NotFound desc = could not find container \"7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096\": container with ID starting with 7a192ee85a510a47f5ee07c8f2d881fd34d66b53daebebf03176d0c13d09d096 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.324261 4809 scope.go:117] "RemoveContainer" containerID="39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.324692 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93\": container with ID starting with 39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93 not found: ID does not exist" containerID="39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.324747 4809 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93"} err="failed to get container status \"39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93\": rpc error: code = NotFound desc = could not find container \"39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93\": container with ID starting with 39e1d80044b22d3d37c142c444487a4306141ff1d77dee215b6a61dd4f928a93 not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.324789 4809 scope.go:117] "RemoveContainer" containerID="d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.325095 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d\": container with ID starting with d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d not found: ID does not exist" containerID="d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.325134 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d"} err="failed to get container status \"d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d\": rpc error: code = NotFound desc = could not find container \"d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d\": container with ID starting with d532a198802e52e25985eb30ae8892859295dce6e8f4089c7ec4821401e9c47d not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.325158 4809 scope.go:117] "RemoveContainer" containerID="3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.332440 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.338384 4809 scope.go:117] "RemoveContainer" containerID="89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.355543 4809 scope.go:117] "RemoveContainer" containerID="ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.361588 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tcqkz"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.371685 4809 scope.go:117] "RemoveContainer" containerID="3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.382010 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb\": container with ID starting with 3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb not found: ID does not exist" containerID="3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.382081 4809 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb"} err="failed to get container status \"3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb\": rpc error: code = NotFound desc = could not find container \"3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb\": container with ID starting with 3d283f39ae196ccd3a42af35850a2f092579b0649ce88fabd1be9e74fc17c2cb not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.382136 4809 scope.go:117] "RemoveContainer" containerID="89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.383176 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba\": container with ID starting with 89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba not found: ID does not exist" containerID="89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.383217 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba"} err="failed to get container status \"89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba\": rpc error: code = NotFound desc = could not find container \"89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba\": container with ID starting with 89c667e44430b1de9c2c155d016a066b9a4327c9d1460318ca2935d132bec7ba not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.383248 4809 scope.go:117] "RemoveContainer" containerID="ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa" Sep 30 00:13:29 crc kubenswrapper[4809]: E0930 00:13:29.383572 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa\": container with ID starting with ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa not found: ID does not exist" containerID="ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.383590 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa"} err="failed to get container status \"ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa\": rpc error: code = NotFound desc = could not find container \"ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa\": container with ID starting with ce83333ce196eb6ed46c047f1080d71842c3047f59cf1f8d3d94216b829363aa not found: ID does not exist" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.479219 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l74tx"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.483158 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l74tx"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.490825 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8w9pr"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.493389 4809 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-marketplace/redhat-operators-8w9pr"] Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.697558 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" path="/var/lib/kubelet/pods/00358083-724a-47cb-b376-1cc3d97a9fab/volumes" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.698350 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" path="/var/lib/kubelet/pods/31398a45-e7dc-4b71-81ac-0fc5c16b9c26/volumes" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.708963 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" path="/var/lib/kubelet/pods/9c28e622-d89b-4819-b346-5cc07af83b8b/volumes" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.710995 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" path="/var/lib/kubelet/pods/a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2/volumes" Sep 30 00:13:29 crc kubenswrapper[4809]: I0930 00:13:29.711810 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" path="/var/lib/kubelet/pods/edff5a66-7ade-44f3-a770-62a25d56e674/volumes" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.166677 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" event={"ID":"4983b18e-5198-4c30-874a-50c718310352","Type":"ContainerStarted","Data":"416ba9c9f331f06804d9aa2e5f0eb486906e189f37a93719b15f2461217f1b2d"} Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.166733 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" event={"ID":"4983b18e-5198-4c30-874a-50c718310352","Type":"ContainerStarted","Data":"658b394b5884f48dcc0d36949d64182e420ea5719351f19e26d0e6ea217e81cb"} Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.167025 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.171027 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.185899 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tcqkz" podStartSLOduration=2.185870329 podStartE2EDuration="2.185870329s" podCreationTimestamp="2025-09-30 00:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:13:30.184810258 +0000 UTC m=+261.221059676" watchObservedRunningTime="2025-09-30 00:13:30.185870329 +0000 UTC m=+261.222119747" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.696405 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pskp9"] Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697014 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697027 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="extract-content" Sep 30 
00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697042 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697072 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697082 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697091 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697100 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697109 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697122 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697129 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697142 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697149 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697158 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697165 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697178 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697185 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="extract-utilities" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697194 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" containerName="marketplace-operator" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697201 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" containerName="marketplace-operator" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697212 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697220 4809 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697231 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697239 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697249 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697256 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="extract-content" Sep 30 00:13:30 crc kubenswrapper[4809]: E0930 00:13:30.697265 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697273 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697373 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5f8e57a-8ee9-4443-a31f-221ed6ad3ad2" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697385 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="00358083-724a-47cb-b376-1cc3d97a9fab" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697395 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="edff5a66-7ade-44f3-a770-62a25d56e674" containerName="marketplace-operator" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697407 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c28e622-d89b-4819-b346-5cc07af83b8b" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.697417 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="31398a45-e7dc-4b71-81ac-0fc5c16b9c26" containerName="registry-server" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.698279 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.700978 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.712385 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pskp9"] Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.857334 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgxj5\" (UniqueName: \"kubernetes.io/projected/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-kube-api-access-hgxj5\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.857411 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-catalog-content\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.857551 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-utilities\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.909119 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2mrbv"] Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.910282 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.913631 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.949696 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2mrbv"] Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.959019 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-catalog-content\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.959329 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-utilities\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.959481 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgxj5\" (UniqueName: \"kubernetes.io/projected/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-kube-api-access-hgxj5\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.959530 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-catalog-content\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.959738 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-utilities\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:30 crc kubenswrapper[4809]: I0930 00:13:30.993168 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgxj5\" (UniqueName: \"kubernetes.io/projected/9c79b2c1-6175-4968-bd68-8a8493bf2e1f-kube-api-access-hgxj5\") pod \"redhat-marketplace-pskp9\" (UID: \"9c79b2c1-6175-4968-bd68-8a8493bf2e1f\") " pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.060618 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pv27\" (UniqueName: \"kubernetes.io/projected/b804053a-50f4-4355-8dc6-2183cb8c964f-kube-api-access-9pv27\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.060704 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b804053a-50f4-4355-8dc6-2183cb8c964f-catalog-content\") pod \"certified-operators-2mrbv\" (UID: 
\"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.060792 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b804053a-50f4-4355-8dc6-2183cb8c964f-utilities\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.067624 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.162513 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b804053a-50f4-4355-8dc6-2183cb8c964f-utilities\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.162597 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pv27\" (UniqueName: \"kubernetes.io/projected/b804053a-50f4-4355-8dc6-2183cb8c964f-kube-api-access-9pv27\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.162674 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b804053a-50f4-4355-8dc6-2183cb8c964f-catalog-content\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.163934 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b804053a-50f4-4355-8dc6-2183cb8c964f-utilities\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.163979 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b804053a-50f4-4355-8dc6-2183cb8c964f-catalog-content\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.192600 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pv27\" (UniqueName: \"kubernetes.io/projected/b804053a-50f4-4355-8dc6-2183cb8c964f-kube-api-access-9pv27\") pod \"certified-operators-2mrbv\" (UID: \"b804053a-50f4-4355-8dc6-2183cb8c964f\") " pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.232775 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.425303 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2mrbv"] Sep 30 00:13:31 crc kubenswrapper[4809]: I0930 00:13:31.486522 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pskp9"] Sep 30 00:13:32 crc kubenswrapper[4809]: I0930 00:13:32.178925 4809 generic.go:334] "Generic (PLEG): container finished" podID="b804053a-50f4-4355-8dc6-2183cb8c964f" containerID="9a9b5ed8c0262a8acf789917c84af996decd7f2a945a84e2df7a06f0bd2f1eb0" exitCode=0 Sep 30 00:13:32 crc kubenswrapper[4809]: I0930 00:13:32.179098 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mrbv" event={"ID":"b804053a-50f4-4355-8dc6-2183cb8c964f","Type":"ContainerDied","Data":"9a9b5ed8c0262a8acf789917c84af996decd7f2a945a84e2df7a06f0bd2f1eb0"} Sep 30 00:13:32 crc kubenswrapper[4809]: I0930 00:13:32.179172 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mrbv" event={"ID":"b804053a-50f4-4355-8dc6-2183cb8c964f","Type":"ContainerStarted","Data":"c63cdececb223993e6acba7731c751a1daf7dcda70525f7a925cdbe05e8fcf5e"} Sep 30 00:13:32 crc kubenswrapper[4809]: I0930 00:13:32.182231 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c79b2c1-6175-4968-bd68-8a8493bf2e1f" containerID="2b8534484b0c478ac3202db76ac69088a9dbc16d18fbf8c46ac5966232d4202f" exitCode=0 Sep 30 00:13:32 crc kubenswrapper[4809]: I0930 00:13:32.183184 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pskp9" event={"ID":"9c79b2c1-6175-4968-bd68-8a8493bf2e1f","Type":"ContainerDied","Data":"2b8534484b0c478ac3202db76ac69088a9dbc16d18fbf8c46ac5966232d4202f"} Sep 30 00:13:32 crc kubenswrapper[4809]: I0930 00:13:32.183237 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pskp9" event={"ID":"9c79b2c1-6175-4968-bd68-8a8493bf2e1f","Type":"ContainerStarted","Data":"88dbb856f85d192782bf1ae4e04bde8c1e55eeb5bffbabaaef518601b7a13d71"} Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.095750 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vwc2v"] Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.097544 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.102148 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.110899 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vwc2v"] Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.188402 4809 generic.go:334] "Generic (PLEG): container finished" podID="b804053a-50f4-4355-8dc6-2183cb8c964f" containerID="3f8507ab99a3b4874c40edaac221cd5ed2a6317370fcec3f6816a8f0263b56b6" exitCode=0 Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.188449 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mrbv" event={"ID":"b804053a-50f4-4355-8dc6-2183cb8c964f","Type":"ContainerDied","Data":"3f8507ab99a3b4874c40edaac221cd5ed2a6317370fcec3f6816a8f0263b56b6"} Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.194980 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8p6v\" (UniqueName: \"kubernetes.io/projected/21e7f024-842e-4582-b6a5-2776d20aefb2-kube-api-access-t8p6v\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.195027 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-catalog-content\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.195061 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-utilities\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.295847 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4pvqk"] Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.297091 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-catalog-content\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.297283 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-utilities\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.297496 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8p6v\" (UniqueName: \"kubernetes.io/projected/21e7f024-842e-4582-b6a5-2776d20aefb2-kube-api-access-t8p6v\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " 
pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.297701 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-catalog-content\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.297826 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-utilities\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.299649 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.305930 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.309373 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4pvqk"] Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.324824 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8p6v\" (UniqueName: \"kubernetes.io/projected/21e7f024-842e-4582-b6a5-2776d20aefb2-kube-api-access-t8p6v\") pod \"redhat-operators-vwc2v\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.398436 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008393f8-6b7a-461e-91a1-47e58d4942d4-utilities\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.398503 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008393f8-6b7a-461e-91a1-47e58d4942d4-catalog-content\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.398602 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwrpq\" (UniqueName: \"kubernetes.io/projected/008393f8-6b7a-461e-91a1-47e58d4942d4-kube-api-access-mwrpq\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.500729 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008393f8-6b7a-461e-91a1-47e58d4942d4-utilities\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.500800 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/008393f8-6b7a-461e-91a1-47e58d4942d4-catalog-content\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.500851 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwrpq\" (UniqueName: \"kubernetes.io/projected/008393f8-6b7a-461e-91a1-47e58d4942d4-kube-api-access-mwrpq\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.501351 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008393f8-6b7a-461e-91a1-47e58d4942d4-utilities\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.501816 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008393f8-6b7a-461e-91a1-47e58d4942d4-catalog-content\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.525732 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwrpq\" (UniqueName: \"kubernetes.io/projected/008393f8-6b7a-461e-91a1-47e58d4942d4-kube-api-access-mwrpq\") pod \"community-operators-4pvqk\" (UID: \"008393f8-6b7a-461e-91a1-47e58d4942d4\") " pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.538257 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.623803 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:33 crc kubenswrapper[4809]: I0930 00:13:33.973043 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vwc2v"] Sep 30 00:13:33 crc kubenswrapper[4809]: W0930 00:13:33.980735 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21e7f024_842e_4582_b6a5_2776d20aefb2.slice/crio-76cd0fe4500348bc4f61012b7084d96fb81815f8307a431a553ca219a2926e09 WatchSource:0}: Error finding container 76cd0fe4500348bc4f61012b7084d96fb81815f8307a431a553ca219a2926e09: Status 404 returned error can't find the container with id 76cd0fe4500348bc4f61012b7084d96fb81815f8307a431a553ca219a2926e09 Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.061488 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4pvqk"] Sep 30 00:13:34 crc kubenswrapper[4809]: W0930 00:13:34.085288 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod008393f8_6b7a_461e_91a1_47e58d4942d4.slice/crio-13df7275ca2c97c776cf74be1a01d475d8dd55fca69f183931bc64e77af22e0c WatchSource:0}: Error finding container 13df7275ca2c97c776cf74be1a01d475d8dd55fca69f183931bc64e77af22e0c: Status 404 returned error can't find the container with id 13df7275ca2c97c776cf74be1a01d475d8dd55fca69f183931bc64e77af22e0c Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.193997 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pvqk" event={"ID":"008393f8-6b7a-461e-91a1-47e58d4942d4","Type":"ContainerStarted","Data":"13df7275ca2c97c776cf74be1a01d475d8dd55fca69f183931bc64e77af22e0c"} Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.196152 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mrbv" event={"ID":"b804053a-50f4-4355-8dc6-2183cb8c964f","Type":"ContainerStarted","Data":"82b16a8651a4f11a21747de3f33dcd9c169ae8d660668e59683507e8265c21a2"} Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.199027 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c79b2c1-6175-4968-bd68-8a8493bf2e1f" containerID="e5312d9d82d10786306abc6d05fa073bc82ec60cf7c80c4e9f1411b27bd5163e" exitCode=0 Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.199242 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pskp9" event={"ID":"9c79b2c1-6175-4968-bd68-8a8493bf2e1f","Type":"ContainerDied","Data":"e5312d9d82d10786306abc6d05fa073bc82ec60cf7c80c4e9f1411b27bd5163e"} Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.200536 4809 generic.go:334] "Generic (PLEG): container finished" podID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerID="b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857" exitCode=0 Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.200564 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerDied","Data":"b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857"} Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.200608 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" 
event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerStarted","Data":"76cd0fe4500348bc4f61012b7084d96fb81815f8307a431a553ca219a2926e09"} Sep 30 00:13:34 crc kubenswrapper[4809]: I0930 00:13:34.236707 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2mrbv" podStartSLOduration=2.845512634 podStartE2EDuration="4.236677695s" podCreationTimestamp="2025-09-30 00:13:30 +0000 UTC" firstStartedPulling="2025-09-30 00:13:32.180625152 +0000 UTC m=+263.216874560" lastFinishedPulling="2025-09-30 00:13:33.571790213 +0000 UTC m=+264.608039621" observedRunningTime="2025-09-30 00:13:34.226937619 +0000 UTC m=+265.263187037" watchObservedRunningTime="2025-09-30 00:13:34.236677695 +0000 UTC m=+265.272927103" Sep 30 00:13:35 crc kubenswrapper[4809]: I0930 00:13:35.213869 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pskp9" event={"ID":"9c79b2c1-6175-4968-bd68-8a8493bf2e1f","Type":"ContainerStarted","Data":"4b3018cc7a83077b17a75fbeb5bc5f07b24f6c30c12f16c1a77a1aa622d8f729"} Sep 30 00:13:35 crc kubenswrapper[4809]: I0930 00:13:35.216671 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerStarted","Data":"1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8"} Sep 30 00:13:35 crc kubenswrapper[4809]: I0930 00:13:35.224237 4809 generic.go:334] "Generic (PLEG): container finished" podID="008393f8-6b7a-461e-91a1-47e58d4942d4" containerID="851a35b756521471e265f485d36e4ee8e3552e598a8f27e46a5470d932623387" exitCode=0 Sep 30 00:13:35 crc kubenswrapper[4809]: I0930 00:13:35.224354 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pvqk" event={"ID":"008393f8-6b7a-461e-91a1-47e58d4942d4","Type":"ContainerDied","Data":"851a35b756521471e265f485d36e4ee8e3552e598a8f27e46a5470d932623387"} Sep 30 00:13:35 crc kubenswrapper[4809]: I0930 00:13:35.233147 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pskp9" podStartSLOduration=2.769237012 podStartE2EDuration="5.233127967s" podCreationTimestamp="2025-09-30 00:13:30 +0000 UTC" firstStartedPulling="2025-09-30 00:13:32.187151093 +0000 UTC m=+263.223400491" lastFinishedPulling="2025-09-30 00:13:34.651042038 +0000 UTC m=+265.687291446" observedRunningTime="2025-09-30 00:13:35.232973243 +0000 UTC m=+266.269222651" watchObservedRunningTime="2025-09-30 00:13:35.233127967 +0000 UTC m=+266.269377375" Sep 30 00:13:36 crc kubenswrapper[4809]: I0930 00:13:36.231470 4809 generic.go:334] "Generic (PLEG): container finished" podID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerID="1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8" exitCode=0 Sep 30 00:13:36 crc kubenswrapper[4809]: I0930 00:13:36.231578 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerDied","Data":"1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8"} Sep 30 00:13:37 crc kubenswrapper[4809]: I0930 00:13:37.239612 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerStarted","Data":"d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b"} Sep 30 00:13:37 crc 
kubenswrapper[4809]: I0930 00:13:37.241252 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pvqk" event={"ID":"008393f8-6b7a-461e-91a1-47e58d4942d4","Type":"ContainerStarted","Data":"96b4a2bc7460f0f5c990212e6a077d7380237009ee3317c8db49a1cafa66a8eb"} Sep 30 00:13:37 crc kubenswrapper[4809]: I0930 00:13:37.268292 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vwc2v" podStartSLOduration=1.572234151 podStartE2EDuration="4.268270562s" podCreationTimestamp="2025-09-30 00:13:33 +0000 UTC" firstStartedPulling="2025-09-30 00:13:34.201866741 +0000 UTC m=+265.238116149" lastFinishedPulling="2025-09-30 00:13:36.897903152 +0000 UTC m=+267.934152560" observedRunningTime="2025-09-30 00:13:37.26743514 +0000 UTC m=+268.303684568" watchObservedRunningTime="2025-09-30 00:13:37.268270562 +0000 UTC m=+268.304519980" Sep 30 00:13:38 crc kubenswrapper[4809]: I0930 00:13:38.247372 4809 generic.go:334] "Generic (PLEG): container finished" podID="008393f8-6b7a-461e-91a1-47e58d4942d4" containerID="96b4a2bc7460f0f5c990212e6a077d7380237009ee3317c8db49a1cafa66a8eb" exitCode=0 Sep 30 00:13:38 crc kubenswrapper[4809]: I0930 00:13:38.248727 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pvqk" event={"ID":"008393f8-6b7a-461e-91a1-47e58d4942d4","Type":"ContainerDied","Data":"96b4a2bc7460f0f5c990212e6a077d7380237009ee3317c8db49a1cafa66a8eb"} Sep 30 00:13:39 crc kubenswrapper[4809]: I0930 00:13:39.256025 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4pvqk" event={"ID":"008393f8-6b7a-461e-91a1-47e58d4942d4","Type":"ContainerStarted","Data":"773ddc5ee5b23beaa9762a5876cffe67aeb84fff5fd9a78fb559a58c257c92df"} Sep 30 00:13:39 crc kubenswrapper[4809]: I0930 00:13:39.274609 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4pvqk" podStartSLOduration=2.6236964069999997 podStartE2EDuration="6.27459204s" podCreationTimestamp="2025-09-30 00:13:33 +0000 UTC" firstStartedPulling="2025-09-30 00:13:35.226128864 +0000 UTC m=+266.262378272" lastFinishedPulling="2025-09-30 00:13:38.877024497 +0000 UTC m=+269.913273905" observedRunningTime="2025-09-30 00:13:39.272570116 +0000 UTC m=+270.308819524" watchObservedRunningTime="2025-09-30 00:13:39.27459204 +0000 UTC m=+270.310841448" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.068050 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.068111 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.111226 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.233899 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.234372 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.276308 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.316080 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2mrbv" Sep 30 00:13:41 crc kubenswrapper[4809]: I0930 00:13:41.323228 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pskp9" Sep 30 00:13:43 crc kubenswrapper[4809]: I0930 00:13:43.538548 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:43 crc kubenswrapper[4809]: I0930 00:13:43.539008 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:43 crc kubenswrapper[4809]: I0930 00:13:43.602089 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:13:43 crc kubenswrapper[4809]: I0930 00:13:43.624896 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:43 crc kubenswrapper[4809]: I0930 00:13:43.624954 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:43 crc kubenswrapper[4809]: I0930 00:13:43.671092 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:44 crc kubenswrapper[4809]: I0930 00:13:44.327717 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4pvqk" Sep 30 00:13:44 crc kubenswrapper[4809]: I0930 00:13:44.329906 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 00:14:55 crc kubenswrapper[4809]: I0930 00:14:55.325245 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:14:55 crc kubenswrapper[4809]: I0930 00:14:55.325910 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.152419 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6"] Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.156472 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.156973 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6"] Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.159574 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.161347 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.236404 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldkj2\" (UniqueName: \"kubernetes.io/projected/d75d9dbd-f149-4d41-86d6-c11dde89be6e-kube-api-access-ldkj2\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.236580 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d75d9dbd-f149-4d41-86d6-c11dde89be6e-secret-volume\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.236602 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d75d9dbd-f149-4d41-86d6-c11dde89be6e-config-volume\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.338356 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d75d9dbd-f149-4d41-86d6-c11dde89be6e-secret-volume\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.338428 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d75d9dbd-f149-4d41-86d6-c11dde89be6e-config-volume\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.338477 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldkj2\" (UniqueName: \"kubernetes.io/projected/d75d9dbd-f149-4d41-86d6-c11dde89be6e-kube-api-access-ldkj2\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.339347 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d75d9dbd-f149-4d41-86d6-c11dde89be6e-config-volume\") pod 
\"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.347709 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d75d9dbd-f149-4d41-86d6-c11dde89be6e-secret-volume\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.355789 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldkj2\" (UniqueName: \"kubernetes.io/projected/d75d9dbd-f149-4d41-86d6-c11dde89be6e-kube-api-access-ldkj2\") pod \"collect-profiles-29319855-n7sw6\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.525916 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.724838 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6"] Sep 30 00:15:00 crc kubenswrapper[4809]: I0930 00:15:00.744916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" event={"ID":"d75d9dbd-f149-4d41-86d6-c11dde89be6e","Type":"ContainerStarted","Data":"61dd4461fef1aaea88cd9a99b3b0865ea6d5d3c9e1bc61d0656a54923aac5155"} Sep 30 00:15:01 crc kubenswrapper[4809]: I0930 00:15:01.755040 4809 generic.go:334] "Generic (PLEG): container finished" podID="d75d9dbd-f149-4d41-86d6-c11dde89be6e" containerID="3629b4d139dab398dd1a828a6e9129d9166699b013e6b158fe5a2bbc4b9bb38d" exitCode=0 Sep 30 00:15:01 crc kubenswrapper[4809]: I0930 00:15:01.755122 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" event={"ID":"d75d9dbd-f149-4d41-86d6-c11dde89be6e","Type":"ContainerDied","Data":"3629b4d139dab398dd1a828a6e9129d9166699b013e6b158fe5a2bbc4b9bb38d"} Sep 30 00:15:02 crc kubenswrapper[4809]: I0930 00:15:02.994780 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.073118 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldkj2\" (UniqueName: \"kubernetes.io/projected/d75d9dbd-f149-4d41-86d6-c11dde89be6e-kube-api-access-ldkj2\") pod \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.073202 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d75d9dbd-f149-4d41-86d6-c11dde89be6e-secret-volume\") pod \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.073333 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d75d9dbd-f149-4d41-86d6-c11dde89be6e-config-volume\") pod \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\" (UID: \"d75d9dbd-f149-4d41-86d6-c11dde89be6e\") " Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.074195 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d75d9dbd-f149-4d41-86d6-c11dde89be6e-config-volume" (OuterVolumeSpecName: "config-volume") pod "d75d9dbd-f149-4d41-86d6-c11dde89be6e" (UID: "d75d9dbd-f149-4d41-86d6-c11dde89be6e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.082863 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d75d9dbd-f149-4d41-86d6-c11dde89be6e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d75d9dbd-f149-4d41-86d6-c11dde89be6e" (UID: "d75d9dbd-f149-4d41-86d6-c11dde89be6e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.092035 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d75d9dbd-f149-4d41-86d6-c11dde89be6e-kube-api-access-ldkj2" (OuterVolumeSpecName: "kube-api-access-ldkj2") pod "d75d9dbd-f149-4d41-86d6-c11dde89be6e" (UID: "d75d9dbd-f149-4d41-86d6-c11dde89be6e"). InnerVolumeSpecName "kube-api-access-ldkj2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.174922 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldkj2\" (UniqueName: \"kubernetes.io/projected/d75d9dbd-f149-4d41-86d6-c11dde89be6e-kube-api-access-ldkj2\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.174966 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d75d9dbd-f149-4d41-86d6-c11dde89be6e-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.174979 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d75d9dbd-f149-4d41-86d6-c11dde89be6e-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.766215 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" event={"ID":"d75d9dbd-f149-4d41-86d6-c11dde89be6e","Type":"ContainerDied","Data":"61dd4461fef1aaea88cd9a99b3b0865ea6d5d3c9e1bc61d0656a54923aac5155"} Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.766552 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61dd4461fef1aaea88cd9a99b3b0865ea6d5d3c9e1bc61d0656a54923aac5155" Sep 30 00:15:03 crc kubenswrapper[4809]: I0930 00:15:03.766280 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6" Sep 30 00:15:25 crc kubenswrapper[4809]: I0930 00:15:25.324602 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:15:25 crc kubenswrapper[4809]: I0930 00:15:25.325271 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.776741 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r67kd"] Sep 30 00:15:43 crc kubenswrapper[4809]: E0930 00:15:43.777605 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75d9dbd-f149-4d41-86d6-c11dde89be6e" containerName="collect-profiles" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.777623 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75d9dbd-f149-4d41-86d6-c11dde89be6e" containerName="collect-profiles" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.777774 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d75d9dbd-f149-4d41-86d6-c11dde89be6e" containerName="collect-profiles" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.778548 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.787401 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r67kd"] Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894344 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-registry-certificates\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894532 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw4zk\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-kube-api-access-qw4zk\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894659 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894757 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894828 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894884 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-trusted-ca\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.894956 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-bound-sa-token\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.895046 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-registry-tls\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.915893 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996330 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-registry-certificates\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996411 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw4zk\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-kube-api-access-qw4zk\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996445 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996477 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996510 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-trusted-ca\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996540 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-bound-sa-token\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.996954 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-registry-tls\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.997430 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.997671 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-registry-certificates\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:43 crc kubenswrapper[4809]: I0930 00:15:43.998244 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-trusted-ca\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:44 crc kubenswrapper[4809]: I0930 00:15:44.002406 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:44 crc kubenswrapper[4809]: I0930 00:15:44.002451 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-registry-tls\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:44 crc kubenswrapper[4809]: I0930 00:15:44.012591 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-bound-sa-token\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:44 crc kubenswrapper[4809]: I0930 00:15:44.013084 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw4zk\" (UniqueName: \"kubernetes.io/projected/35f21ee2-6326-4dca-84a0-cc46e1c8aeab-kube-api-access-qw4zk\") pod \"image-registry-66df7c8f76-r67kd\" (UID: \"35f21ee2-6326-4dca-84a0-cc46e1c8aeab\") " pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:44 crc kubenswrapper[4809]: I0930 00:15:44.094026 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:44 crc kubenswrapper[4809]: I0930 00:15:44.271404 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r67kd"] Sep 30 00:15:45 crc kubenswrapper[4809]: I0930 00:15:45.026951 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" event={"ID":"35f21ee2-6326-4dca-84a0-cc46e1c8aeab","Type":"ContainerStarted","Data":"0efd9e4beff7119884f65e53b2cf5d96c1196d8dc473f1378d66179ccf406a47"} Sep 30 00:15:45 crc kubenswrapper[4809]: I0930 00:15:45.027000 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" event={"ID":"35f21ee2-6326-4dca-84a0-cc46e1c8aeab","Type":"ContainerStarted","Data":"4aa7e87d48b33c7eaf96b53dc315edd902d7ad7297a2701e748aec16bd52b50a"} Sep 30 00:15:45 crc kubenswrapper[4809]: I0930 00:15:45.027132 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:15:55 crc kubenswrapper[4809]: I0930 00:15:55.325543 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:15:55 crc kubenswrapper[4809]: I0930 00:15:55.326020 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:15:55 crc kubenswrapper[4809]: I0930 00:15:55.326067 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:15:55 crc kubenswrapper[4809]: I0930 00:15:55.326700 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"29022887a4c6fb6f640d22610d08caf7b703bbcb52c661cad5e3c432c0c8a806"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:15:55 crc kubenswrapper[4809]: I0930 00:15:55.326749 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://29022887a4c6fb6f640d22610d08caf7b703bbcb52c661cad5e3c432c0c8a806" gracePeriod=600 Sep 30 00:15:56 crc kubenswrapper[4809]: I0930 00:15:56.131480 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="29022887a4c6fb6f640d22610d08caf7b703bbcb52c661cad5e3c432c0c8a806" exitCode=0 Sep 30 00:15:56 crc kubenswrapper[4809]: I0930 00:15:56.131550 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"29022887a4c6fb6f640d22610d08caf7b703bbcb52c661cad5e3c432c0c8a806"} Sep 30 00:15:56 crc kubenswrapper[4809]: I0930 
00:15:56.132499 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"cdeffa51f659e763563b4d5a30aef21919c4d89a6729000221e77d2e14a6b1d2"} Sep 30 00:15:56 crc kubenswrapper[4809]: I0930 00:15:56.132577 4809 scope.go:117] "RemoveContainer" containerID="68da98a1dab3c337253e69b93f0b94bc1ef55564777755a42fde39d860e0a408" Sep 30 00:15:56 crc kubenswrapper[4809]: I0930 00:15:56.159809 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" podStartSLOduration=13.159769581 podStartE2EDuration="13.159769581s" podCreationTimestamp="2025-09-30 00:15:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:15:45.055111656 +0000 UTC m=+396.091361064" watchObservedRunningTime="2025-09-30 00:15:56.159769581 +0000 UTC m=+407.196019029" Sep 30 00:16:04 crc kubenswrapper[4809]: I0930 00:16:04.106102 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-r67kd" Sep 30 00:16:04 crc kubenswrapper[4809]: I0930 00:16:04.161445 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lzb22"] Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.215248 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" podUID="00290c40-68ba-4728-b5fe-3ff985476bcf" containerName="registry" containerID="cri-o://d20fef6941a572fa3b135c63bf568331e85d4e4292ab2ce69fec9a1bd4205b23" gracePeriod=30 Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.271435 4809 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-lzb22 container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.39:5000/healthz\": dial tcp 10.217.0.39:5000: connect: connection refused" start-of-body= Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.271522 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" podUID="00290c40-68ba-4728-b5fe-3ff985476bcf" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.39:5000/healthz\": dial tcp 10.217.0.39:5000: connect: connection refused" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.381814 4809 generic.go:334] "Generic (PLEG): container finished" podID="00290c40-68ba-4728-b5fe-3ff985476bcf" containerID="d20fef6941a572fa3b135c63bf568331e85d4e4292ab2ce69fec9a1bd4205b23" exitCode=0 Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.381876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" event={"ID":"00290c40-68ba-4728-b5fe-3ff985476bcf","Type":"ContainerDied","Data":"d20fef6941a572fa3b135c63bf568331e85d4e4292ab2ce69fec9a1bd4205b23"} Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.614009 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.721720 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/00290c40-68ba-4728-b5fe-3ff985476bcf-ca-trust-extracted\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.721924 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.722061 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/00290c40-68ba-4728-b5fe-3ff985476bcf-installation-pull-secrets\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.722141 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-tls\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.722186 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp6bf\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-kube-api-access-vp6bf\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.722224 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-certificates\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.722272 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-trusted-ca\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.722307 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-bound-sa-token\") pod \"00290c40-68ba-4728-b5fe-3ff985476bcf\" (UID: \"00290c40-68ba-4728-b5fe-3ff985476bcf\") " Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.724299 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.729944 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.732704 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.733121 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00290c40-68ba-4728-b5fe-3ff985476bcf-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.734193 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-kube-api-access-vp6bf" (OuterVolumeSpecName: "kube-api-access-vp6bf") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "kube-api-access-vp6bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.734680 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.737740 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.781369 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00290c40-68ba-4728-b5fe-3ff985476bcf-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "00290c40-68ba-4728-b5fe-3ff985476bcf" (UID: "00290c40-68ba-4728-b5fe-3ff985476bcf"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.824529 4809 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/00290c40-68ba-4728-b5fe-3ff985476bcf-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.824849 4809 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.824876 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp6bf\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-kube-api-access-vp6bf\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.824974 4809 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.825027 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00290c40-68ba-4728-b5fe-3ff985476bcf-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.825058 4809 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00290c40-68ba-4728-b5fe-3ff985476bcf-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:29 crc kubenswrapper[4809]: I0930 00:16:29.825087 4809 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/00290c40-68ba-4728-b5fe-3ff985476bcf-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:30 crc kubenswrapper[4809]: I0930 00:16:30.392798 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" event={"ID":"00290c40-68ba-4728-b5fe-3ff985476bcf","Type":"ContainerDied","Data":"c41804cb9eb9030d516ad4aa8fe3d7b8ee5b9dab0d303cfbdf00e9182e6ee1a5"} Sep 30 00:16:30 crc kubenswrapper[4809]: I0930 00:16:30.392883 4809 scope.go:117] "RemoveContainer" containerID="d20fef6941a572fa3b135c63bf568331e85d4e4292ab2ce69fec9a1bd4205b23" Sep 30 00:16:30 crc kubenswrapper[4809]: I0930 00:16:30.392990 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lzb22" Sep 30 00:16:30 crc kubenswrapper[4809]: I0930 00:16:30.443180 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lzb22"] Sep 30 00:16:30 crc kubenswrapper[4809]: I0930 00:16:30.449439 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lzb22"] Sep 30 00:16:31 crc kubenswrapper[4809]: I0930 00:16:31.702624 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00290c40-68ba-4728-b5fe-3ff985476bcf" path="/var/lib/kubelet/pods/00290c40-68ba-4728-b5fe-3ff985476bcf/volumes" Sep 30 00:17:55 crc kubenswrapper[4809]: I0930 00:17:55.324866 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:17:55 crc kubenswrapper[4809]: I0930 00:17:55.325870 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:18:25 crc kubenswrapper[4809]: I0930 00:18:25.325429 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:18:25 crc kubenswrapper[4809]: I0930 00:18:25.325866 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:18:55 crc kubenswrapper[4809]: I0930 00:18:55.324838 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:18:55 crc kubenswrapper[4809]: I0930 00:18:55.325381 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:18:55 crc kubenswrapper[4809]: I0930 00:18:55.325421 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:18:55 crc kubenswrapper[4809]: I0930 00:18:55.326027 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cdeffa51f659e763563b4d5a30aef21919c4d89a6729000221e77d2e14a6b1d2"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Sep 30 00:18:55 crc kubenswrapper[4809]: I0930 00:18:55.326084 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://cdeffa51f659e763563b4d5a30aef21919c4d89a6729000221e77d2e14a6b1d2" gracePeriod=600 Sep 30 00:18:56 crc kubenswrapper[4809]: I0930 00:18:56.335633 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="cdeffa51f659e763563b4d5a30aef21919c4d89a6729000221e77d2e14a6b1d2" exitCode=0 Sep 30 00:18:56 crc kubenswrapper[4809]: I0930 00:18:56.335670 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"cdeffa51f659e763563b4d5a30aef21919c4d89a6729000221e77d2e14a6b1d2"} Sep 30 00:18:56 crc kubenswrapper[4809]: I0930 00:18:56.335982 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"165bd6d9351c1f3568d24afda65f12e5fa3c3ab08edb7e15f4eaa480ba979d2d"} Sep 30 00:18:56 crc kubenswrapper[4809]: I0930 00:18:56.336003 4809 scope.go:117] "RemoveContainer" containerID="29022887a4c6fb6f640d22610d08caf7b703bbcb52c661cad5e3c432c0c8a806" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.076036 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw"] Sep 30 00:19:07 crc kubenswrapper[4809]: E0930 00:19:07.076852 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00290c40-68ba-4728-b5fe-3ff985476bcf" containerName="registry" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.076870 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="00290c40-68ba-4728-b5fe-3ff985476bcf" containerName="registry" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.077002 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="00290c40-68ba-4728-b5fe-3ff985476bcf" containerName="registry" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.077864 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.079571 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.091542 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw"] Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.218865 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.218919 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm9gr\" (UniqueName: \"kubernetes.io/projected/a86f4233-76f9-4358-85e9-48cc83757d13-kube-api-access-zm9gr\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.218987 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.320057 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.320148 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.320208 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm9gr\" (UniqueName: \"kubernetes.io/projected/a86f4233-76f9-4358-85e9-48cc83757d13-kube-api-access-zm9gr\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.320748 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.320998 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.352352 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm9gr\" (UniqueName: \"kubernetes.io/projected/a86f4233-76f9-4358-85e9-48cc83757d13-kube-api-access-zm9gr\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.393969 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:07 crc kubenswrapper[4809]: I0930 00:19:07.700014 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw"] Sep 30 00:19:08 crc kubenswrapper[4809]: I0930 00:19:08.420910 4809 generic.go:334] "Generic (PLEG): container finished" podID="a86f4233-76f9-4358-85e9-48cc83757d13" containerID="6cb9fb723db973ba8d97224cf09118754e8ad55ce82940143e1903ae25e83e4e" exitCode=0 Sep 30 00:19:08 crc kubenswrapper[4809]: I0930 00:19:08.421008 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" event={"ID":"a86f4233-76f9-4358-85e9-48cc83757d13","Type":"ContainerDied","Data":"6cb9fb723db973ba8d97224cf09118754e8ad55ce82940143e1903ae25e83e4e"} Sep 30 00:19:08 crc kubenswrapper[4809]: I0930 00:19:08.421180 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" event={"ID":"a86f4233-76f9-4358-85e9-48cc83757d13","Type":"ContainerStarted","Data":"88f8e12ec556eaacbf128002da2ea7fe703a7d2150030e05ea75797ffc706cfe"} Sep 30 00:19:08 crc kubenswrapper[4809]: I0930 00:19:08.422732 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:19:10 crc kubenswrapper[4809]: I0930 00:19:10.444901 4809 generic.go:334] "Generic (PLEG): container finished" podID="a86f4233-76f9-4358-85e9-48cc83757d13" containerID="d174202010462ee8d7313417a380366473d2cd58f3c4c9a6d813355ad90d6265" exitCode=0 Sep 30 00:19:10 crc kubenswrapper[4809]: I0930 00:19:10.445032 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" event={"ID":"a86f4233-76f9-4358-85e9-48cc83757d13","Type":"ContainerDied","Data":"d174202010462ee8d7313417a380366473d2cd58f3c4c9a6d813355ad90d6265"} Sep 30 00:19:11 crc kubenswrapper[4809]: I0930 00:19:11.457894 4809 generic.go:334] "Generic (PLEG): container finished" 
podID="a86f4233-76f9-4358-85e9-48cc83757d13" containerID="36cb63401a950b081e82feb4a49c54c57525179ef7bc39ee0f468995e4222d8f" exitCode=0 Sep 30 00:19:11 crc kubenswrapper[4809]: I0930 00:19:11.457960 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" event={"ID":"a86f4233-76f9-4358-85e9-48cc83757d13","Type":"ContainerDied","Data":"36cb63401a950b081e82feb4a49c54c57525179ef7bc39ee0f468995e4222d8f"} Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.806341 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.896026 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm9gr\" (UniqueName: \"kubernetes.io/projected/a86f4233-76f9-4358-85e9-48cc83757d13-kube-api-access-zm9gr\") pod \"a86f4233-76f9-4358-85e9-48cc83757d13\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.896359 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-bundle\") pod \"a86f4233-76f9-4358-85e9-48cc83757d13\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.896519 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-util\") pod \"a86f4233-76f9-4358-85e9-48cc83757d13\" (UID: \"a86f4233-76f9-4358-85e9-48cc83757d13\") " Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.898096 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-bundle" (OuterVolumeSpecName: "bundle") pod "a86f4233-76f9-4358-85e9-48cc83757d13" (UID: "a86f4233-76f9-4358-85e9-48cc83757d13"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.901095 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a86f4233-76f9-4358-85e9-48cc83757d13-kube-api-access-zm9gr" (OuterVolumeSpecName: "kube-api-access-zm9gr") pod "a86f4233-76f9-4358-85e9-48cc83757d13" (UID: "a86f4233-76f9-4358-85e9-48cc83757d13"). InnerVolumeSpecName "kube-api-access-zm9gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.910290 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-util" (OuterVolumeSpecName: "util") pod "a86f4233-76f9-4358-85e9-48cc83757d13" (UID: "a86f4233-76f9-4358-85e9-48cc83757d13"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.997805 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm9gr\" (UniqueName: \"kubernetes.io/projected/a86f4233-76f9-4358-85e9-48cc83757d13-kube-api-access-zm9gr\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.998137 4809 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:12 crc kubenswrapper[4809]: I0930 00:19:12.998153 4809 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a86f4233-76f9-4358-85e9-48cc83757d13-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:13 crc kubenswrapper[4809]: I0930 00:19:13.476247 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" event={"ID":"a86f4233-76f9-4358-85e9-48cc83757d13","Type":"ContainerDied","Data":"88f8e12ec556eaacbf128002da2ea7fe703a7d2150030e05ea75797ffc706cfe"} Sep 30 00:19:13 crc kubenswrapper[4809]: I0930 00:19:13.476287 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88f8e12ec556eaacbf128002da2ea7fe703a7d2150030e05ea75797ffc706cfe" Sep 30 00:19:13 crc kubenswrapper[4809]: I0930 00:19:13.476402 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw" Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.784911 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lkdqg"] Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785547 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-controller" containerID="cri-o://a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785674 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="northd" containerID="cri-o://ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785632 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="nbdb" containerID="cri-o://f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785715 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785745 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-node" 
containerID="cri-o://ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785766 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="sbdb" containerID="cri-o://bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.785816 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-acl-logging" containerID="cri-o://1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" gracePeriod=30 Sep 30 00:19:18 crc kubenswrapper[4809]: I0930 00:19:18.832833 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" containerID="cri-o://044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" gracePeriod=30 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.189906 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/3.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.192130 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovn-acl-logging/0.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.192630 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovn-controller/0.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.193089 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246177 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4lwrb"] Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246391 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246412 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246426 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246435 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246445 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246455 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246466 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246473 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246481 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="pull" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246488 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="pull" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246500 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246508 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246519 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="sbdb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246526 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="sbdb" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246536 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246544 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246552 4809 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-node" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246559 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-node" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246568 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="util" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246575 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="util" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246584 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="extract" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246590 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="extract" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246596 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kubecfg-setup" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246601 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kubecfg-setup" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246611 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-acl-logging" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246617 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-acl-logging" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246626 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="northd" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246632 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="northd" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246656 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="nbdb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246662 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="nbdb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246744 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-node" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246754 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246765 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246774 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246781 4809 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovn-acl-logging" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246787 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246794 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246802 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a86f4233-76f9-4358-85e9-48cc83757d13" containerName="extract" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246810 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="northd" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246817 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246824 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="nbdb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246832 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="sbdb" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.246910 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.246917 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.247003 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerName="ovnkube-controller" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.248387 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289637 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-script-lib\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289732 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-etc-openvswitch\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289760 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-var-lib-openvswitch\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289785 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-bin\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289803 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-systemd-units\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289824 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-ovn\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289845 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovn-node-metrics-cert\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289864 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-log-socket\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289891 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-openvswitch\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289930 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdrpr\" (UniqueName: \"kubernetes.io/projected/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-kube-api-access-kdrpr\") pod 
\"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289951 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-netns\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.289974 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-config\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290000 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-netd\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290035 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-kubelet\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290063 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-slash\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290081 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-node-log\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290115 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-systemd\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290123 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290138 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-env-overrides\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290200 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290246 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-ovn-kubernetes\") pod \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\" (UID: \"5ec79e76-2a92-48e7-a55f-f8e630b00ed5\") " Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290509 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290502 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290617 4809 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290662 4809 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290673 4809 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290722 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290767 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-log-socket" (OuterVolumeSpecName: "log-socket") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290765 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290790 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-node-log" (OuterVolumeSpecName: "node-log") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290790 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290810 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-slash" (OuterVolumeSpecName: "host-slash") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290826 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290826 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290831 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290893 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.290895 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.291088 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.291157 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.291235 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.296905 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.302953 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-kube-api-access-kdrpr" (OuterVolumeSpecName: "kube-api-access-kdrpr") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "kube-api-access-kdrpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.306679 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "5ec79e76-2a92-48e7-a55f-f8e630b00ed5" (UID: "5ec79e76-2a92-48e7-a55f-f8e630b00ed5"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392293 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-run-netns\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392375 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovn-node-metrics-cert\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392443 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-var-lib-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392464 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-kubelet\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392482 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-env-overrides\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392534 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-systemd\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392553 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvrcl\" (UniqueName: \"kubernetes.io/projected/f7e35864-9ab3-4ad9-906b-2411517e8caf-kube-api-access-lvrcl\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392574 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392597 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-cni-netd\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392618 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-ovn\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392659 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-log-socket\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392702 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovnkube-config\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392725 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-systemd-units\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392745 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-run-ovn-kubernetes\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-node-log\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.392984 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-etc-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393082 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovnkube-script-lib\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393153 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393194 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-slash\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393253 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-cni-bin\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393404 4809 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393428 4809 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393445 4809 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393457 4809 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393472 4809 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393484 4809 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393496 4809 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393508 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393522 4809 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-log-socket\") on node 
\"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393535 4809 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393549 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdrpr\" (UniqueName: \"kubernetes.io/projected/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-kube-api-access-kdrpr\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393563 4809 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393577 4809 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393590 4809 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393602 4809 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393616 4809 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-host-slash\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.393629 4809 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5ec79e76-2a92-48e7-a55f-f8e630b00ed5-node-log\") on node \"crc\" DevicePath \"\"" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495186 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovnkube-config\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495244 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-systemd-units\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495276 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-run-ovn-kubernetes\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495299 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-node-log\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495332 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-etc-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495357 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovnkube-script-lib\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495382 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495407 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-slash\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495453 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-cni-bin\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495445 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-systemd-units\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495517 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-run-netns\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495455 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-run-ovn-kubernetes\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495484 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-run-netns\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495574 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-slash\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495581 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovn-node-metrics-cert\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495607 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495618 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-var-lib-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495666 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-node-log\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495666 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-env-overrides\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495727 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-kubelet\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495778 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-systemd\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495798 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvrcl\" (UniqueName: 
\"kubernetes.io/projected/f7e35864-9ab3-4ad9-906b-2411517e8caf-kube-api-access-lvrcl\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495822 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495844 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-cni-netd\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495868 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-ovn\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.495901 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-log-socket\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496041 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-log-socket\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496065 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-kubelet\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496087 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-systemd\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496155 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-env-overrides\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496182 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovnkube-script-lib\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496205 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-cni-bin\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496184 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovnkube-config\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496239 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-var-lib-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496253 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-etc-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496279 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-host-cni-netd\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496285 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-openvswitch\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.496309 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f7e35864-9ab3-4ad9-906b-2411517e8caf-run-ovn\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.501319 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f7e35864-9ab3-4ad9-906b-2411517e8caf-ovn-node-metrics-cert\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.516966 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovnkube-controller/3.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.519776 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovn-acl-logging/0.log" Sep 30 00:19:19 crc 
kubenswrapper[4809]: I0930 00:19:19.520435 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lkdqg_5ec79e76-2a92-48e7-a55f-f8e630b00ed5/ovn-controller/0.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.520912 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" exitCode=0 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521016 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" exitCode=0 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521096 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" exitCode=0 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521206 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" exitCode=0 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521281 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" exitCode=0 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521345 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" exitCode=0 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521405 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" exitCode=143 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521467 4809 generic.go:334] "Generic (PLEG): container finished" podID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" exitCode=143 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521575 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521713 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521808 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.521876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} Sep 30 00:19:19 crc 
kubenswrapper[4809]: I0930 00:19:19.521945 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522020 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522093 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522165 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522227 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522296 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522358 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522427 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522488 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522564 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522625 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522697 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522787 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522853 4809 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522887 4809 scope.go:117] "RemoveContainer" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522913 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523073 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523092 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523100 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523106 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523112 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523118 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523126 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523145 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523164 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523172 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.522912 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523179 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523255 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523264 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523270 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523276 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523281 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523286 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523291 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523299 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lkdqg" event={"ID":"5ec79e76-2a92-48e7-a55f-f8e630b00ed5","Type":"ContainerDied","Data":"663df6a7998c7a81d329fbaaa888bbbdd7811e4068b0f467bed460e93317c1aa"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523308 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523317 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523322 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523327 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523332 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523338 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523343 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523348 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523353 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.523358 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.529517 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/2.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.530308 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/1.log" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.530352 4809 generic.go:334] "Generic (PLEG): container finished" podID="efc7b2e1-7308-483a-9117-02e83c45a528" containerID="f5c595bedd8ad8ab824b5c4f31b23ff132f383cd0e37b5ad71bd7e639e3a350a" exitCode=2 Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.530385 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerDied","Data":"f5c595bedd8ad8ab824b5c4f31b23ff132f383cd0e37b5ad71bd7e639e3a350a"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.530409 4809 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885"} Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.530956 4809 scope.go:117] "RemoveContainer" containerID="f5c595bedd8ad8ab824b5c4f31b23ff132f383cd0e37b5ad71bd7e639e3a350a" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.531187 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-h6xqr_openshift-multus(efc7b2e1-7308-483a-9117-02e83c45a528)\"" pod="openshift-multus/multus-h6xqr" podUID="efc7b2e1-7308-483a-9117-02e83c45a528" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.540346 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvrcl\" (UniqueName: \"kubernetes.io/projected/f7e35864-9ab3-4ad9-906b-2411517e8caf-kube-api-access-lvrcl\") pod \"ovnkube-node-4lwrb\" (UID: \"f7e35864-9ab3-4ad9-906b-2411517e8caf\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.551852 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.561523 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.563918 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lkdqg"] Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.580307 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lkdqg"] Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.587630 4809 scope.go:117] "RemoveContainer" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.612962 4809 scope.go:117] "RemoveContainer" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.641131 4809 scope.go:117] "RemoveContainer" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.678824 4809 scope.go:117] "RemoveContainer" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.696875 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ec79e76-2a92-48e7-a55f-f8e630b00ed5" path="/var/lib/kubelet/pods/5ec79e76-2a92-48e7-a55f-f8e630b00ed5/volumes" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.699398 4809 scope.go:117] "RemoveContainer" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.715013 4809 scope.go:117] "RemoveContainer" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.751357 4809 scope.go:117] "RemoveContainer" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.819879 4809 scope.go:117] "RemoveContainer" containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.850926 4809 scope.go:117] "RemoveContainer" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.851396 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": container with ID starting with 044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c not found: ID does not exist" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.851437 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} err="failed to get container status \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": rpc error: code = NotFound desc = could not find container \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": 
container with ID starting with 044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.851464 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.851668 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": container with ID starting with 6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f not found: ID does not exist" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.851688 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} err="failed to get container status \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": rpc error: code = NotFound desc = could not find container \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": container with ID starting with 6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.851701 4809 scope.go:117] "RemoveContainer" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.851908 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": container with ID starting with bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5 not found: ID does not exist" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.851929 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} err="failed to get container status \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": rpc error: code = NotFound desc = could not find container \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": container with ID starting with bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.851953 4809 scope.go:117] "RemoveContainer" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.852140 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": container with ID starting with f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2 not found: ID does not exist" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.852160 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} err="failed to get container status 
\"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": rpc error: code = NotFound desc = could not find container \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": container with ID starting with f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.852184 4809 scope.go:117] "RemoveContainer" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.852541 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": container with ID starting with ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6 not found: ID does not exist" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.852564 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} err="failed to get container status \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": rpc error: code = NotFound desc = could not find container \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": container with ID starting with ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.852576 4809 scope.go:117] "RemoveContainer" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.852810 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": container with ID starting with 605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb not found: ID does not exist" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.852831 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} err="failed to get container status \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": rpc error: code = NotFound desc = could not find container \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": container with ID starting with 605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.852862 4809 scope.go:117] "RemoveContainer" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.853074 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": container with ID starting with ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4 not found: ID does not exist" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853097 4809 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} err="failed to get container status \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": rpc error: code = NotFound desc = could not find container \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": container with ID starting with ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853121 4809 scope.go:117] "RemoveContainer" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.853307 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": container with ID starting with 1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88 not found: ID does not exist" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853327 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} err="failed to get container status \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": rpc error: code = NotFound desc = could not find container \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": container with ID starting with 1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853351 4809 scope.go:117] "RemoveContainer" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.853538 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": container with ID starting with a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf not found: ID does not exist" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853560 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} err="failed to get container status \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": rpc error: code = NotFound desc = could not find container \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": container with ID starting with a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853584 4809 scope.go:117] "RemoveContainer" containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" Sep 30 00:19:19 crc kubenswrapper[4809]: E0930 00:19:19.853850 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": container with ID starting with c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236 not found: ID does not exist" 
containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853886 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} err="failed to get container status \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": rpc error: code = NotFound desc = could not find container \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": container with ID starting with c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.853902 4809 scope.go:117] "RemoveContainer" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.854124 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} err="failed to get container status \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": rpc error: code = NotFound desc = could not find container \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": container with ID starting with 044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.854160 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.854352 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} err="failed to get container status \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": rpc error: code = NotFound desc = could not find container \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": container with ID starting with 6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.854372 4809 scope.go:117] "RemoveContainer" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.854814 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} err="failed to get container status \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": rpc error: code = NotFound desc = could not find container \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": container with ID starting with bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.854866 4809 scope.go:117] "RemoveContainer" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.855181 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} err="failed to get container status \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": rpc error: code = NotFound desc = could not find 
container \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": container with ID starting with f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.855213 4809 scope.go:117] "RemoveContainer" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.855922 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} err="failed to get container status \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": rpc error: code = NotFound desc = could not find container \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": container with ID starting with ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856060 4809 scope.go:117] "RemoveContainer" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856283 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} err="failed to get container status \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": rpc error: code = NotFound desc = could not find container \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": container with ID starting with 605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856304 4809 scope.go:117] "RemoveContainer" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856523 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} err="failed to get container status \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": rpc error: code = NotFound desc = could not find container \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": container with ID starting with ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856541 4809 scope.go:117] "RemoveContainer" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856743 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} err="failed to get container status \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": rpc error: code = NotFound desc = could not find container \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": container with ID starting with 1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856761 4809 scope.go:117] "RemoveContainer" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.856990 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} err="failed to get container status \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": rpc error: code = NotFound desc = could not find container \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": container with ID starting with a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857010 4809 scope.go:117] "RemoveContainer" containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857227 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} err="failed to get container status \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": rpc error: code = NotFound desc = could not find container \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": container with ID starting with c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857249 4809 scope.go:117] "RemoveContainer" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857463 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} err="failed to get container status \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": rpc error: code = NotFound desc = could not find container \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": container with ID starting with 044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857492 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857780 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} err="failed to get container status \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": rpc error: code = NotFound desc = could not find container \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": container with ID starting with 6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857799 4809 scope.go:117] "RemoveContainer" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.857984 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} err="failed to get container status \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": rpc error: code = NotFound desc = could not find container \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": container with ID starting with 
bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.858003 4809 scope.go:117] "RemoveContainer" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.858201 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} err="failed to get container status \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": rpc error: code = NotFound desc = could not find container \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": container with ID starting with f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.858220 4809 scope.go:117] "RemoveContainer" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.858407 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} err="failed to get container status \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": rpc error: code = NotFound desc = could not find container \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": container with ID starting with ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.858428 4809 scope.go:117] "RemoveContainer" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.860264 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} err="failed to get container status \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": rpc error: code = NotFound desc = could not find container \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": container with ID starting with 605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.860296 4809 scope.go:117] "RemoveContainer" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.861197 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} err="failed to get container status \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": rpc error: code = NotFound desc = could not find container \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": container with ID starting with ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.870787 4809 scope.go:117] "RemoveContainer" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.872889 4809 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} err="failed to get container status \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": rpc error: code = NotFound desc = could not find container \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": container with ID starting with 1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.872989 4809 scope.go:117] "RemoveContainer" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.873350 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} err="failed to get container status \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": rpc error: code = NotFound desc = could not find container \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": container with ID starting with a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.873387 4809 scope.go:117] "RemoveContainer" containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.873661 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} err="failed to get container status \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": rpc error: code = NotFound desc = could not find container \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": container with ID starting with c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.873691 4809 scope.go:117] "RemoveContainer" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.877827 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} err="failed to get container status \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": rpc error: code = NotFound desc = could not find container \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": container with ID starting with 044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.877889 4809 scope.go:117] "RemoveContainer" containerID="6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.878505 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f"} err="failed to get container status \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": rpc error: code = NotFound desc = could not find container \"6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f\": container with ID starting with 6db9b22f63b913165938f9ee0228b95e3d76dbaaa1a049c87e0a12c8d5679a6f not found: ID does not exist" Sep 
30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.878536 4809 scope.go:117] "RemoveContainer" containerID="bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.879175 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5"} err="failed to get container status \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": rpc error: code = NotFound desc = could not find container \"bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5\": container with ID starting with bf0593c053b68b8a3067bc43c34a20a73bbba2b44c47ba42270166825efdf8e5 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.879214 4809 scope.go:117] "RemoveContainer" containerID="f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.879503 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2"} err="failed to get container status \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": rpc error: code = NotFound desc = could not find container \"f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2\": container with ID starting with f779b72a6ad5a7639f64b8baf50e5c6dfb78d86f751cd0ead0c28ae7d51e40a2 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.879528 4809 scope.go:117] "RemoveContainer" containerID="ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.879953 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6"} err="failed to get container status \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": rpc error: code = NotFound desc = could not find container \"ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6\": container with ID starting with ee4ac2ccbbadc31d9f1aa412c0bdb03eb36a07ff4103492bc88c1e2ff19172b6 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.879982 4809 scope.go:117] "RemoveContainer" containerID="605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.880603 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb"} err="failed to get container status \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": rpc error: code = NotFound desc = could not find container \"605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb\": container with ID starting with 605df66f4e56924306773faeba21507cbcbae561fd1aaa12242ab6841ee132bb not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.880632 4809 scope.go:117] "RemoveContainer" containerID="ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.880910 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4"} err="failed to get container status 
\"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": rpc error: code = NotFound desc = could not find container \"ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4\": container with ID starting with ce52c8ff2ba42bf930517342e295bc96cfa65d27eb0c1ce8a8c13db338531cd4 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.880933 4809 scope.go:117] "RemoveContainer" containerID="1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.881145 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88"} err="failed to get container status \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": rpc error: code = NotFound desc = could not find container \"1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88\": container with ID starting with 1658a6124d10ace156b12dde2005a512172d6295c3d67de9f32422b73a2b3b88 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.881164 4809 scope.go:117] "RemoveContainer" containerID="a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.881377 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf"} err="failed to get container status \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": rpc error: code = NotFound desc = could not find container \"a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf\": container with ID starting with a492d5ad4c9770d3c588fb7c874873ce23a91beaede5dc003ad838bed07148cf not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.881397 4809 scope.go:117] "RemoveContainer" containerID="c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.882475 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236"} err="failed to get container status \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": rpc error: code = NotFound desc = could not find container \"c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236\": container with ID starting with c3d02d35c79dead52220007c869cbea5a8e64995d94dd2261ecc0c59733fc236 not found: ID does not exist" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.882501 4809 scope.go:117] "RemoveContainer" containerID="044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c" Sep 30 00:19:19 crc kubenswrapper[4809]: I0930 00:19:19.882743 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c"} err="failed to get container status \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": rpc error: code = NotFound desc = could not find container \"044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c\": container with ID starting with 044f51917f7503663e2b301e814428a2aee3ae4a5765e477561feaa412f2d98c not found: ID does not exist" Sep 30 00:19:20 crc kubenswrapper[4809]: I0930 00:19:20.543859 4809 generic.go:334] "Generic (PLEG): container finished" 
podID="f7e35864-9ab3-4ad9-906b-2411517e8caf" containerID="9ee0d45ee0a768c89b35094ce54ac2314822f5cc088ab6b88c8d057a2e53f07d" exitCode=0 Sep 30 00:19:20 crc kubenswrapper[4809]: I0930 00:19:20.543931 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerDied","Data":"9ee0d45ee0a768c89b35094ce54ac2314822f5cc088ab6b88c8d057a2e53f07d"} Sep 30 00:19:20 crc kubenswrapper[4809]: I0930 00:19:20.544159 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"714736824d88e5ed2de5b2f4d1590e2e5d5c570ade0007c783cdee04bdfecb25"} Sep 30 00:19:21 crc kubenswrapper[4809]: I0930 00:19:21.552976 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"51759240eaa2af5073e6ee4965a716e5865b6893f9103f43182f97c41e950d9c"} Sep 30 00:19:21 crc kubenswrapper[4809]: I0930 00:19:21.553392 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"885b3221376ce5e9e665a8af79fb57702857bed317e9733546be38a8e9c4d016"} Sep 30 00:19:21 crc kubenswrapper[4809]: I0930 00:19:21.553404 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"252f59a550ec48c544c52ecafe0b560518fadb468af5a9750d41642d7d1f741c"} Sep 30 00:19:21 crc kubenswrapper[4809]: I0930 00:19:21.553420 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"edb1762a7fe663bfca0b4dddee409d3304f973e2361a77542d8819255ba48f3c"} Sep 30 00:19:21 crc kubenswrapper[4809]: I0930 00:19:21.553428 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"d0a63ffcc14213046e167af1a4c88fe099016a17e298b097fbf4cb5fb9c1c6ae"} Sep 30 00:19:21 crc kubenswrapper[4809]: I0930 00:19:21.553440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"695eda7cf6426c4b11b2494bad69d983a7d081b611b6f95145f3e5ccb801d104"} Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.178380 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x"] Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.180076 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.183973 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.184282 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-ljmq7" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.184471 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.228214 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4"] Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.229158 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.232976 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-s22wg" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.233258 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.234703 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4"] Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.235598 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.256207 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxwrh\" (UniqueName: \"kubernetes.io/projected/a8ae4f48-169a-409c-bae6-6a89fb1263cb-kube-api-access-nxwrh\") pod \"obo-prometheus-operator-7c8cf85677-5tk2x\" (UID: \"a8ae4f48-169a-409c-bae6-6a89fb1263cb\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.357690 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/83cb98f0-24e4-42a8-a44d-e96d65c87580-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4\" (UID: \"83cb98f0-24e4-42a8-a44d-e96d65c87580\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.357777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/83cb98f0-24e4-42a8-a44d-e96d65c87580-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4\" (UID: \"83cb98f0-24e4-42a8-a44d-e96d65c87580\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.357841 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5b43be87-9a51-42ab-85cc-f193171a9682-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4\" (UID: \"5b43be87-9a51-42ab-85cc-f193171a9682\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.357908 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxwrh\" (UniqueName: \"kubernetes.io/projected/a8ae4f48-169a-409c-bae6-6a89fb1263cb-kube-api-access-nxwrh\") pod \"obo-prometheus-operator-7c8cf85677-5tk2x\" (UID: \"a8ae4f48-169a-409c-bae6-6a89fb1263cb\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.358001 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5b43be87-9a51-42ab-85cc-f193171a9682-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4\" (UID: \"5b43be87-9a51-42ab-85cc-f193171a9682\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.391448 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxwrh\" (UniqueName: \"kubernetes.io/projected/a8ae4f48-169a-409c-bae6-6a89fb1263cb-kube-api-access-nxwrh\") pod \"obo-prometheus-operator-7c8cf85677-5tk2x\" (UID: \"a8ae4f48-169a-409c-bae6-6a89fb1263cb\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.409808 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-d9b5l"] Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 
00:19:24.410484 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.412364 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-bvqrj" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.412618 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.459110 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5b43be87-9a51-42ab-85cc-f193171a9682-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4\" (UID: \"5b43be87-9a51-42ab-85cc-f193171a9682\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.459194 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5b43be87-9a51-42ab-85cc-f193171a9682-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4\" (UID: \"5b43be87-9a51-42ab-85cc-f193171a9682\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.459226 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/83cb98f0-24e4-42a8-a44d-e96d65c87580-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4\" (UID: \"83cb98f0-24e4-42a8-a44d-e96d65c87580\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.459247 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/83cb98f0-24e4-42a8-a44d-e96d65c87580-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4\" (UID: \"83cb98f0-24e4-42a8-a44d-e96d65c87580\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.463442 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5b43be87-9a51-42ab-85cc-f193171a9682-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4\" (UID: \"5b43be87-9a51-42ab-85cc-f193171a9682\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.464693 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/83cb98f0-24e4-42a8-a44d-e96d65c87580-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4\" (UID: \"83cb98f0-24e4-42a8-a44d-e96d65c87580\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.467325 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/83cb98f0-24e4-42a8-a44d-e96d65c87580-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4\" 
(UID: \"83cb98f0-24e4-42a8-a44d-e96d65c87580\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.476563 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5b43be87-9a51-42ab-85cc-f193171a9682-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4\" (UID: \"5b43be87-9a51-42ab-85cc-f193171a9682\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.498813 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.519478 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(c454c73bb4ae31528c92283220cbb776b6b2eff1852fc23970a562f744e6d7ff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.519551 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(c454c73bb4ae31528c92283220cbb776b6b2eff1852fc23970a562f744e6d7ff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.519570 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(c454c73bb4ae31528c92283220cbb776b6b2eff1852fc23970a562f744e6d7ff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.519614 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators(a8ae4f48-169a-409c-bae6-6a89fb1263cb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators(a8ae4f48-169a-409c-bae6-6a89fb1263cb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(c454c73bb4ae31528c92283220cbb776b6b2eff1852fc23970a562f744e6d7ff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" podUID="a8ae4f48-169a-409c-bae6-6a89fb1263cb" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.549923 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.559129 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.559994 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lll6p\" (UniqueName: \"kubernetes.io/projected/8eefb09b-5a65-4213-b5e8-9842cbd24fcf-kube-api-access-lll6p\") pod \"observability-operator-cc5f78dfc-d9b5l\" (UID: \"8eefb09b-5a65-4213-b5e8-9842cbd24fcf\") " pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.560048 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/8eefb09b-5a65-4213-b5e8-9842cbd24fcf-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-d9b5l\" (UID: \"8eefb09b-5a65-4213-b5e8-9842cbd24fcf\") " pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.576070 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"401d5af72f54044d2081fe831ce94afaf633913a356d9111b5956969605ed69a"} Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.595593 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7cc8cedf9f14837df423674cef02dc43c0a7693ad052061f2c523a3690caa6ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.595668 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7cc8cedf9f14837df423674cef02dc43c0a7693ad052061f2c523a3690caa6ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.595696 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7cc8cedf9f14837df423674cef02dc43c0a7693ad052061f2c523a3690caa6ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.595735 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators(5b43be87-9a51-42ab-85cc-f193171a9682)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators(5b43be87-9a51-42ab-85cc-f193171a9682)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7cc8cedf9f14837df423674cef02dc43c0a7693ad052061f2c523a3690caa6ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" podUID="5b43be87-9a51-42ab-85cc-f193171a9682" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.620144 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(d303b88f55dec29c5c9dfa6f91dc4e5f201b0012e996bc774a896262cd9820dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.620204 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(d303b88f55dec29c5c9dfa6f91dc4e5f201b0012e996bc774a896262cd9820dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.620228 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(d303b88f55dec29c5c9dfa6f91dc4e5f201b0012e996bc774a896262cd9820dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.620268 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators(83cb98f0-24e4-42a8-a44d-e96d65c87580)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators(83cb98f0-24e4-42a8-a44d-e96d65c87580)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(d303b88f55dec29c5c9dfa6f91dc4e5f201b0012e996bc774a896262cd9820dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" podUID="83cb98f0-24e4-42a8-a44d-e96d65c87580" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.628965 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-svnh2"] Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.629801 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.632222 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-rdxq6" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.661020 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lll6p\" (UniqueName: \"kubernetes.io/projected/8eefb09b-5a65-4213-b5e8-9842cbd24fcf-kube-api-access-lll6p\") pod \"observability-operator-cc5f78dfc-d9b5l\" (UID: \"8eefb09b-5a65-4213-b5e8-9842cbd24fcf\") " pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.661096 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/8eefb09b-5a65-4213-b5e8-9842cbd24fcf-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-d9b5l\" (UID: \"8eefb09b-5a65-4213-b5e8-9842cbd24fcf\") " pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.665237 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/8eefb09b-5a65-4213-b5e8-9842cbd24fcf-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-d9b5l\" (UID: \"8eefb09b-5a65-4213-b5e8-9842cbd24fcf\") " pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.685500 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lll6p\" (UniqueName: \"kubernetes.io/projected/8eefb09b-5a65-4213-b5e8-9842cbd24fcf-kube-api-access-lll6p\") pod \"observability-operator-cc5f78dfc-d9b5l\" (UID: \"8eefb09b-5a65-4213-b5e8-9842cbd24fcf\") " pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.730177 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.755835 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(a55d6c52d848627d9cb5792ec461d3f88503ce937a12e8ada0f15f219f8f6a53): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.755901 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(a55d6c52d848627d9cb5792ec461d3f88503ce937a12e8ada0f15f219f8f6a53): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.755923 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(a55d6c52d848627d9cb5792ec461d3f88503ce937a12e8ada0f15f219f8f6a53): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.755967 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-cc5f78dfc-d9b5l_openshift-operators(8eefb09b-5a65-4213-b5e8-9842cbd24fcf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-cc5f78dfc-d9b5l_openshift-operators(8eefb09b-5a65-4213-b5e8-9842cbd24fcf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(a55d6c52d848627d9cb5792ec461d3f88503ce937a12e8ada0f15f219f8f6a53): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" podUID="8eefb09b-5a65-4213-b5e8-9842cbd24fcf" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.762480 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/4b9630a3-4470-48ab-982b-f9f9cedf52da-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-svnh2\" (UID: \"4b9630a3-4470-48ab-982b-f9f9cedf52da\") " pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.762578 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x78j\" (UniqueName: \"kubernetes.io/projected/4b9630a3-4470-48ab-982b-f9f9cedf52da-kube-api-access-8x78j\") pod \"perses-operator-54bc95c9fb-svnh2\" (UID: \"4b9630a3-4470-48ab-982b-f9f9cedf52da\") " pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.864353 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/4b9630a3-4470-48ab-982b-f9f9cedf52da-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-svnh2\" (UID: \"4b9630a3-4470-48ab-982b-f9f9cedf52da\") " pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.864423 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x78j\" (UniqueName: \"kubernetes.io/projected/4b9630a3-4470-48ab-982b-f9f9cedf52da-kube-api-access-8x78j\") pod \"perses-operator-54bc95c9fb-svnh2\" (UID: \"4b9630a3-4470-48ab-982b-f9f9cedf52da\") " pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.865762 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/4b9630a3-4470-48ab-982b-f9f9cedf52da-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-svnh2\" (UID: 
\"4b9630a3-4470-48ab-982b-f9f9cedf52da\") " pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.881925 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x78j\" (UniqueName: \"kubernetes.io/projected/4b9630a3-4470-48ab-982b-f9f9cedf52da-kube-api-access-8x78j\") pod \"perses-operator-54bc95c9fb-svnh2\" (UID: \"4b9630a3-4470-48ab-982b-f9f9cedf52da\") " pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: I0930 00:19:24.951628 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.972548 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(0e2c59bd8cb90f69590c0fd47f301cbf9b306768aa2ab4a2fccdb799ca9ba5b8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.972624 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(0e2c59bd8cb90f69590c0fd47f301cbf9b306768aa2ab4a2fccdb799ca9ba5b8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.972668 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(0e2c59bd8cb90f69590c0fd47f301cbf9b306768aa2ab4a2fccdb799ca9ba5b8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:24 crc kubenswrapper[4809]: E0930 00:19:24.972737 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-54bc95c9fb-svnh2_openshift-operators(4b9630a3-4470-48ab-982b-f9f9cedf52da)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-54bc95c9fb-svnh2_openshift-operators(4b9630a3-4470-48ab-982b-f9f9cedf52da)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(0e2c59bd8cb90f69590c0fd47f301cbf9b306768aa2ab4a2fccdb799ca9ba5b8): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" podUID="4b9630a3-4470-48ab-982b-f9f9cedf52da" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.589181 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" event={"ID":"f7e35864-9ab3-4ad9-906b-2411517e8caf","Type":"ContainerStarted","Data":"08cec4bbc6f8137e03bf174e9cc949c20c6d88452aa79c1ec6d17be53ef83aae"} Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.589768 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.589780 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.622401 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.628868 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" podStartSLOduration=7.628851909 podStartE2EDuration="7.628851909s" podCreationTimestamp="2025-09-30 00:19:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:19:26.628321864 +0000 UTC m=+617.664571272" watchObservedRunningTime="2025-09-30 00:19:26.628851909 +0000 UTC m=+617.665101327" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.991808 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x"] Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.991964 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.992336 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.995103 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4"] Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.995224 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:26 crc kubenswrapper[4809]: I0930 00:19:26.995727 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.023880 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(3ec88cfd42629f0f648fae20e50764f03647f117c13b222b1f5be089c9439f6a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.023939 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(3ec88cfd42629f0f648fae20e50764f03647f117c13b222b1f5be089c9439f6a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.023960 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(3ec88cfd42629f0f648fae20e50764f03647f117c13b222b1f5be089c9439f6a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.024003 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators(83cb98f0-24e4-42a8-a44d-e96d65c87580)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators(83cb98f0-24e4-42a8-a44d-e96d65c87580)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(3ec88cfd42629f0f648fae20e50764f03647f117c13b222b1f5be089c9439f6a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" podUID="83cb98f0-24e4-42a8-a44d-e96d65c87580" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.029201 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(bf1904fb06619bcf9722fcf258dac48071f978e0d4850e552816faa7cb88aa87): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.029259 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(bf1904fb06619bcf9722fcf258dac48071f978e0d4850e552816faa7cb88aa87): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.029285 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(bf1904fb06619bcf9722fcf258dac48071f978e0d4850e552816faa7cb88aa87): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.029328 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators(a8ae4f48-169a-409c-bae6-6a89fb1263cb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators(a8ae4f48-169a-409c-bae6-6a89fb1263cb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(bf1904fb06619bcf9722fcf258dac48071f978e0d4850e552816faa7cb88aa87): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" podUID="a8ae4f48-169a-409c-bae6-6a89fb1263cb" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.053112 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-d9b5l"] Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.053232 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.053691 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.058564 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-svnh2"] Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.058706 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.059230 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.061464 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4"] Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.061621 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.062079 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.098834 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(bf9505e29567a3ca4aab6a8366c0762722606f50a268c0a681efbac4bec661f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.098925 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(bf9505e29567a3ca4aab6a8366c0762722606f50a268c0a681efbac4bec661f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.098958 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(bf9505e29567a3ca4aab6a8366c0762722606f50a268c0a681efbac4bec661f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.099032 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-54bc95c9fb-svnh2_openshift-operators(4b9630a3-4470-48ab-982b-f9f9cedf52da)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-54bc95c9fb-svnh2_openshift-operators(4b9630a3-4470-48ab-982b-f9f9cedf52da)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(bf9505e29567a3ca4aab6a8366c0762722606f50a268c0a681efbac4bec661f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" podUID="4b9630a3-4470-48ab-982b-f9f9cedf52da" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.103945 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(aed355e0e31e793705e400c7f4723a54d6243012ed665b5d058146dd366e156b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.104014 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(aed355e0e31e793705e400c7f4723a54d6243012ed665b5d058146dd366e156b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.104055 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(aed355e0e31e793705e400c7f4723a54d6243012ed665b5d058146dd366e156b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.104096 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-cc5f78dfc-d9b5l_openshift-operators(8eefb09b-5a65-4213-b5e8-9842cbd24fcf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-cc5f78dfc-d9b5l_openshift-operators(8eefb09b-5a65-4213-b5e8-9842cbd24fcf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(aed355e0e31e793705e400c7f4723a54d6243012ed665b5d058146dd366e156b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" podUID="8eefb09b-5a65-4213-b5e8-9842cbd24fcf" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.114149 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(e0d4872e00fbc620e73ad06db9514da6bfec5964c4983609e2a6a9a3313c3583): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.114218 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(e0d4872e00fbc620e73ad06db9514da6bfec5964c4983609e2a6a9a3313c3583): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.114247 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(e0d4872e00fbc620e73ad06db9514da6bfec5964c4983609e2a6a9a3313c3583): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:27 crc kubenswrapper[4809]: E0930 00:19:27.114297 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators(5b43be87-9a51-42ab-85cc-f193171a9682)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators(5b43be87-9a51-42ab-85cc-f193171a9682)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(e0d4872e00fbc620e73ad06db9514da6bfec5964c4983609e2a6a9a3313c3583): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" podUID="5b43be87-9a51-42ab-85cc-f193171a9682" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.593868 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:27 crc kubenswrapper[4809]: I0930 00:19:27.626634 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:30 crc kubenswrapper[4809]: I0930 00:19:30.690785 4809 scope.go:117] "RemoveContainer" containerID="f5c595bedd8ad8ab824b5c4f31b23ff132f383cd0e37b5ad71bd7e639e3a350a" Sep 30 00:19:30 crc kubenswrapper[4809]: E0930 00:19:30.691638 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-h6xqr_openshift-multus(efc7b2e1-7308-483a-9117-02e83c45a528)\"" pod="openshift-multus/multus-h6xqr" podUID="efc7b2e1-7308-483a-9117-02e83c45a528" Sep 30 00:19:37 crc kubenswrapper[4809]: I0930 00:19:37.690269 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:37 crc kubenswrapper[4809]: I0930 00:19:37.691021 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:37 crc kubenswrapper[4809]: E0930 00:19:37.718477 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(95ad00238d52f89ff3e3a3f24c5666c29f342cf3869e71f759e244c0c96d7341): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:37 crc kubenswrapper[4809]: E0930 00:19:37.718560 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(95ad00238d52f89ff3e3a3f24c5666c29f342cf3869e71f759e244c0c96d7341): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:37 crc kubenswrapper[4809]: E0930 00:19:37.718592 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(95ad00238d52f89ff3e3a3f24c5666c29f342cf3869e71f759e244c0c96d7341): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:37 crc kubenswrapper[4809]: E0930 00:19:37.718673 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-54bc95c9fb-svnh2_openshift-operators(4b9630a3-4470-48ab-982b-f9f9cedf52da)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-54bc95c9fb-svnh2_openshift-operators(4b9630a3-4470-48ab-982b-f9f9cedf52da)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-54bc95c9fb-svnh2_openshift-operators_4b9630a3-4470-48ab-982b-f9f9cedf52da_0(95ad00238d52f89ff3e3a3f24c5666c29f342cf3869e71f759e244c0c96d7341): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" podUID="4b9630a3-4470-48ab-982b-f9f9cedf52da" Sep 30 00:19:38 crc kubenswrapper[4809]: I0930 00:19:38.690971 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:38 crc kubenswrapper[4809]: I0930 00:19:38.691534 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:38 crc kubenswrapper[4809]: E0930 00:19:38.715763 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(7558f3ee38b208c08a8086e102d9c08b83a1673b3f06d28fe9e545b1b0c202b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:38 crc kubenswrapper[4809]: E0930 00:19:38.715887 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(7558f3ee38b208c08a8086e102d9c08b83a1673b3f06d28fe9e545b1b0c202b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:38 crc kubenswrapper[4809]: E0930 00:19:38.715919 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(7558f3ee38b208c08a8086e102d9c08b83a1673b3f06d28fe9e545b1b0c202b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:38 crc kubenswrapper[4809]: E0930 00:19:38.716003 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators(a8ae4f48-169a-409c-bae6-6a89fb1263cb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators(a8ae4f48-169a-409c-bae6-6a89fb1263cb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-7c8cf85677-5tk2x_openshift-operators_a8ae4f48-169a-409c-bae6-6a89fb1263cb_0(7558f3ee38b208c08a8086e102d9c08b83a1673b3f06d28fe9e545b1b0c202b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" podUID="a8ae4f48-169a-409c-bae6-6a89fb1263cb" Sep 30 00:19:39 crc kubenswrapper[4809]: I0930 00:19:39.689925 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:39 crc kubenswrapper[4809]: I0930 00:19:39.693746 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:39 crc kubenswrapper[4809]: E0930 00:19:39.752958 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7d75609e86581b7e57f56977cf09ef9d26df80151d44578e3bf4deddd57c982a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:39 crc kubenswrapper[4809]: E0930 00:19:39.753060 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7d75609e86581b7e57f56977cf09ef9d26df80151d44578e3bf4deddd57c982a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:39 crc kubenswrapper[4809]: E0930 00:19:39.753095 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7d75609e86581b7e57f56977cf09ef9d26df80151d44578e3bf4deddd57c982a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:39 crc kubenswrapper[4809]: E0930 00:19:39.753197 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators(5b43be87-9a51-42ab-85cc-f193171a9682)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators(5b43be87-9a51-42ab-85cc-f193171a9682)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_openshift-operators_5b43be87-9a51-42ab-85cc-f193171a9682_0(7d75609e86581b7e57f56977cf09ef9d26df80151d44578e3bf4deddd57c982a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" podUID="5b43be87-9a51-42ab-85cc-f193171a9682" Sep 30 00:19:41 crc kubenswrapper[4809]: I0930 00:19:41.689971 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:41 crc kubenswrapper[4809]: I0930 00:19:41.689993 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:41 crc kubenswrapper[4809]: I0930 00:19:41.692072 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:41 crc kubenswrapper[4809]: I0930 00:19:41.692220 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.726391 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(8ea1f60167968d8da4c941dd72df4bcc0ee7e680c456c24c1b1a45d476746f3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.726834 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(8ea1f60167968d8da4c941dd72df4bcc0ee7e680c456c24c1b1a45d476746f3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.726884 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(8ea1f60167968d8da4c941dd72df4bcc0ee7e680c456c24c1b1a45d476746f3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.726960 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators(83cb98f0-24e4-42a8-a44d-e96d65c87580)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators(83cb98f0-24e4-42a8-a44d-e96d65c87580)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_openshift-operators_83cb98f0-24e4-42a8-a44d-e96d65c87580_0(8ea1f60167968d8da4c941dd72df4bcc0ee7e680c456c24c1b1a45d476746f3b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" podUID="83cb98f0-24e4-42a8-a44d-e96d65c87580" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.735899 4809 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(bd7985dc1360e11e7c31c9b01e7271986ac4e97648a36b09e45e610fb6849eae): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.735947 4809 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(bd7985dc1360e11e7c31c9b01e7271986ac4e97648a36b09e45e610fb6849eae): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.735969 4809 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(bd7985dc1360e11e7c31c9b01e7271986ac4e97648a36b09e45e610fb6849eae): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:41 crc kubenswrapper[4809]: E0930 00:19:41.736008 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-cc5f78dfc-d9b5l_openshift-operators(8eefb09b-5a65-4213-b5e8-9842cbd24fcf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-cc5f78dfc-d9b5l_openshift-operators(8eefb09b-5a65-4213-b5e8-9842cbd24fcf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-cc5f78dfc-d9b5l_openshift-operators_8eefb09b-5a65-4213-b5e8-9842cbd24fcf_0(bd7985dc1360e11e7c31c9b01e7271986ac4e97648a36b09e45e610fb6849eae): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" podUID="8eefb09b-5a65-4213-b5e8-9842cbd24fcf" Sep 30 00:19:44 crc kubenswrapper[4809]: I0930 00:19:44.690508 4809 scope.go:117] "RemoveContainer" containerID="f5c595bedd8ad8ab824b5c4f31b23ff132f383cd0e37b5ad71bd7e639e3a350a" Sep 30 00:19:45 crc kubenswrapper[4809]: I0930 00:19:45.675168 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/2.log" Sep 30 00:19:45 crc kubenswrapper[4809]: I0930 00:19:45.675744 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/1.log" Sep 30 00:19:45 crc kubenswrapper[4809]: I0930 00:19:45.675790 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h6xqr" event={"ID":"efc7b2e1-7308-483a-9117-02e83c45a528","Type":"ContainerStarted","Data":"1f3aa2a906a541f62e9a6e843e41c670a7df1ba363f9d0d2347aad2ab850bac9"} Sep 30 00:19:48 crc kubenswrapper[4809]: I0930 00:19:48.690056 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:48 crc kubenswrapper[4809]: I0930 00:19:48.690897 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:48 crc kubenswrapper[4809]: I0930 00:19:48.885238 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-svnh2"] Sep 30 00:19:48 crc kubenswrapper[4809]: W0930 00:19:48.902948 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b9630a3_4470_48ab_982b_f9f9cedf52da.slice/crio-d055cc5de45b1f33b5c045de20b0882849626e2b7763659f938cb1e4abefc751 WatchSource:0}: Error finding container d055cc5de45b1f33b5c045de20b0882849626e2b7763659f938cb1e4abefc751: Status 404 returned error can't find the container with id d055cc5de45b1f33b5c045de20b0882849626e2b7763659f938cb1e4abefc751 Sep 30 00:19:49 crc kubenswrapper[4809]: I0930 00:19:49.598982 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4lwrb" Sep 30 00:19:49 crc kubenswrapper[4809]: I0930 00:19:49.698859 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" event={"ID":"4b9630a3-4470-48ab-982b-f9f9cedf52da","Type":"ContainerStarted","Data":"d055cc5de45b1f33b5c045de20b0882849626e2b7763659f938cb1e4abefc751"} Sep 30 00:19:52 crc kubenswrapper[4809]: I0930 00:19:52.691237 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:52 crc kubenswrapper[4809]: I0930 00:19:52.691677 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" Sep 30 00:19:53 crc kubenswrapper[4809]: I0930 00:19:53.690843 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:53 crc kubenswrapper[4809]: I0930 00:19:53.690847 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:53 crc kubenswrapper[4809]: I0930 00:19:53.691386 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" Sep 30 00:19:53 crc kubenswrapper[4809]: I0930 00:19:53.691519 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.148879 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-d9b5l"] Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.278479 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4"] Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.282304 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x"] Sep 30 00:19:55 crc kubenswrapper[4809]: W0930 00:19:55.283911 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b43be87_9a51_42ab_85cc_f193171a9682.slice/crio-5884dd6100adf0403ee2943cc09083bef52351a89093c8265b3fdc9d043fc165 WatchSource:0}: Error finding container 5884dd6100adf0403ee2943cc09083bef52351a89093c8265b3fdc9d043fc165: Status 404 returned error can't find the container with id 5884dd6100adf0403ee2943cc09083bef52351a89093c8265b3fdc9d043fc165 Sep 30 00:19:55 crc kubenswrapper[4809]: W0930 00:19:55.287841 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8ae4f48_169a_409c_bae6_6a89fb1263cb.slice/crio-7c1c0cf5e06f4017f3f324ba349fe9ed55850a3fa28873e68d714d3798a896c9 WatchSource:0}: Error finding container 7c1c0cf5e06f4017f3f324ba349fe9ed55850a3fa28873e68d714d3798a896c9: Status 404 returned error can't find the container with id 7c1c0cf5e06f4017f3f324ba349fe9ed55850a3fa28873e68d714d3798a896c9 Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.723921 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" event={"ID":"8eefb09b-5a65-4213-b5e8-9842cbd24fcf","Type":"ContainerStarted","Data":"f1c1287e6befd6ab1b257bd40561b8acfd4cb476b61084a78e1f2707ad3bdbbb"} Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.725218 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" event={"ID":"4b9630a3-4470-48ab-982b-f9f9cedf52da","Type":"ContainerStarted","Data":"a306a897c07cbdda8f9cce344d7e4eb085cc17a66be3780bef0a150ad984514e"} Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.725289 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.726147 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" event={"ID":"5b43be87-9a51-42ab-85cc-f193171a9682","Type":"ContainerStarted","Data":"5884dd6100adf0403ee2943cc09083bef52351a89093c8265b3fdc9d043fc165"} Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.727239 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" event={"ID":"a8ae4f48-169a-409c-bae6-6a89fb1263cb","Type":"ContainerStarted","Data":"7c1c0cf5e06f4017f3f324ba349fe9ed55850a3fa28873e68d714d3798a896c9"} Sep 30 00:19:55 crc kubenswrapper[4809]: I0930 00:19:55.743058 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" podStartSLOduration=25.693018119 podStartE2EDuration="31.743040795s" podCreationTimestamp="2025-09-30 00:19:24 +0000 UTC" firstStartedPulling="2025-09-30 00:19:48.905293906 +0000 UTC m=+639.941543314" lastFinishedPulling="2025-09-30 00:19:54.955316582 +0000 UTC m=+645.991565990" observedRunningTime="2025-09-30 00:19:55.740455594 +0000 UTC m=+646.776705012" watchObservedRunningTime="2025-09-30 00:19:55.743040795 +0000 UTC m=+646.779290203" Sep 30 00:19:56 crc kubenswrapper[4809]: I0930 00:19:56.690433 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:56 crc kubenswrapper[4809]: I0930 00:19:56.691453 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" Sep 30 00:19:56 crc kubenswrapper[4809]: I0930 00:19:56.988804 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4"] Sep 30 00:19:56 crc kubenswrapper[4809]: W0930 00:19:56.997317 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83cb98f0_24e4_42a8_a44d_e96d65c87580.slice/crio-951f90e743b7af344414867f86ae9de20235355f90e5c9c0c4bd0a8a92d17ae9 WatchSource:0}: Error finding container 951f90e743b7af344414867f86ae9de20235355f90e5c9c0c4bd0a8a92d17ae9: Status 404 returned error can't find the container with id 951f90e743b7af344414867f86ae9de20235355f90e5c9c0c4bd0a8a92d17ae9 Sep 30 00:19:57 crc kubenswrapper[4809]: I0930 00:19:57.749471 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" event={"ID":"83cb98f0-24e4-42a8-a44d-e96d65c87580","Type":"ContainerStarted","Data":"951f90e743b7af344414867f86ae9de20235355f90e5c9c0c4bd0a8a92d17ae9"} Sep 30 00:19:58 crc kubenswrapper[4809]: I0930 00:19:58.775561 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" event={"ID":"5b43be87-9a51-42ab-85cc-f193171a9682","Type":"ContainerStarted","Data":"e372e4e7f0cc37b5e32e88a6b46a36bc32814c77c8659337ada8e9248560dada"} Sep 30 00:19:58 crc kubenswrapper[4809]: I0930 00:19:58.780551 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" event={"ID":"83cb98f0-24e4-42a8-a44d-e96d65c87580","Type":"ContainerStarted","Data":"cc861002c3d13b7e6e220ddd1d11c120d1436d55b3714ee31bf15d1c47385836"} Sep 30 00:19:58 crc kubenswrapper[4809]: I0930 00:19:58.797736 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4" podStartSLOduration=32.306470434 podStartE2EDuration="34.797714504s" podCreationTimestamp="2025-09-30 00:19:24 +0000 UTC" firstStartedPulling="2025-09-30 00:19:55.288210698 +0000 UTC m=+646.324460096" 
lastFinishedPulling="2025-09-30 00:19:57.779454758 +0000 UTC m=+648.815704166" observedRunningTime="2025-09-30 00:19:58.795949605 +0000 UTC m=+649.832199013" watchObservedRunningTime="2025-09-30 00:19:58.797714504 +0000 UTC m=+649.833963912" Sep 30 00:19:58 crc kubenswrapper[4809]: I0930 00:19:58.839460 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4" podStartSLOduration=33.859352381 podStartE2EDuration="34.839442639s" podCreationTimestamp="2025-09-30 00:19:24 +0000 UTC" firstStartedPulling="2025-09-30 00:19:57.002117588 +0000 UTC m=+648.038366986" lastFinishedPulling="2025-09-30 00:19:57.982207836 +0000 UTC m=+649.018457244" observedRunningTime="2025-09-30 00:19:58.836012406 +0000 UTC m=+649.872261814" watchObservedRunningTime="2025-09-30 00:19:58.839442639 +0000 UTC m=+649.875692037" Sep 30 00:20:02 crc kubenswrapper[4809]: I0930 00:20:02.807037 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" event={"ID":"a8ae4f48-169a-409c-bae6-6a89fb1263cb","Type":"ContainerStarted","Data":"2a00a15a8c937e426cf4bea7e76e99367a1b3e480415aa7063eaa019a4ac149c"} Sep 30 00:20:02 crc kubenswrapper[4809]: I0930 00:20:02.809892 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" event={"ID":"8eefb09b-5a65-4213-b5e8-9842cbd24fcf","Type":"ContainerStarted","Data":"8904d1903d0719277d9c69ed8f858b8efd28a9c4f6d1ff4ff290ec0c3276d2af"} Sep 30 00:20:02 crc kubenswrapper[4809]: I0930 00:20:02.810218 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:20:02 crc kubenswrapper[4809]: I0930 00:20:02.812330 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" Sep 30 00:20:02 crc kubenswrapper[4809]: I0930 00:20:02.827185 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5tk2x" podStartSLOduration=32.245833385 podStartE2EDuration="38.827167573s" podCreationTimestamp="2025-09-30 00:19:24 +0000 UTC" firstStartedPulling="2025-09-30 00:19:55.289483824 +0000 UTC m=+646.325733232" lastFinishedPulling="2025-09-30 00:20:01.870818012 +0000 UTC m=+652.907067420" observedRunningTime="2025-09-30 00:20:02.822821805 +0000 UTC m=+653.859071213" watchObservedRunningTime="2025-09-30 00:20:02.827167573 +0000 UTC m=+653.863416981" Sep 30 00:20:02 crc kubenswrapper[4809]: I0930 00:20:02.851266 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-d9b5l" podStartSLOduration=32.111893683 podStartE2EDuration="38.851243548s" podCreationTimestamp="2025-09-30 00:19:24 +0000 UTC" firstStartedPulling="2025-09-30 00:19:55.158915124 +0000 UTC m=+646.195164532" lastFinishedPulling="2025-09-30 00:20:01.898264989 +0000 UTC m=+652.934514397" observedRunningTime="2025-09-30 00:20:02.847775084 +0000 UTC m=+653.884024492" watchObservedRunningTime="2025-09-30 00:20:02.851243548 +0000 UTC m=+653.887492946" Sep 30 00:20:04 crc kubenswrapper[4809]: I0930 00:20:04.955806 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-svnh2" Sep 30 00:20:09 crc kubenswrapper[4809]: I0930 00:20:09.924164 4809 scope.go:117] 
"RemoveContainer" containerID="2c471200a61cc3d7cb632c1e3690f62cfe5565dfda25ae3701efd13482755885" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.753193 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-xjbqr"] Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.754178 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.757222 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.757296 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.757450 4809 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-knrb6" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.766429 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-xjbqr"] Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.773615 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wldts"] Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.774492 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-wldts" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.776764 4809 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-qwpk8" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.785838 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scdgw\" (UniqueName: \"kubernetes.io/projected/fad5773f-9c6d-4050-bd1f-a3b02f66a1b5-kube-api-access-scdgw\") pod \"cert-manager-cainjector-7f985d654d-xjbqr\" (UID: \"fad5773f-9c6d-4050-bd1f-a3b02f66a1b5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.794200 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wldts"] Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.804784 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sf4xj"] Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.805940 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.809928 4809 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-7frwt" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.822451 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sf4xj"] Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.853786 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h6xqr_efc7b2e1-7308-483a-9117-02e83c45a528/kube-multus/2.log" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.887428 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxbfj\" (UniqueName: \"kubernetes.io/projected/639e3dbe-9ddd-484e-917a-97c8e230d4b5-kube-api-access-bxbfj\") pod \"cert-manager-5b446d88c5-wldts\" (UID: \"639e3dbe-9ddd-484e-917a-97c8e230d4b5\") " pod="cert-manager/cert-manager-5b446d88c5-wldts" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.887561 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8rkk\" (UniqueName: \"kubernetes.io/projected/eef61867-7bb5-4d67-a7b8-8d370bda1e8d-kube-api-access-d8rkk\") pod \"cert-manager-webhook-5655c58dd6-sf4xj\" (UID: \"eef61867-7bb5-4d67-a7b8-8d370bda1e8d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.887689 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scdgw\" (UniqueName: \"kubernetes.io/projected/fad5773f-9c6d-4050-bd1f-a3b02f66a1b5-kube-api-access-scdgw\") pod \"cert-manager-cainjector-7f985d654d-xjbqr\" (UID: \"fad5773f-9c6d-4050-bd1f-a3b02f66a1b5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.904963 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scdgw\" (UniqueName: \"kubernetes.io/projected/fad5773f-9c6d-4050-bd1f-a3b02f66a1b5-kube-api-access-scdgw\") pod \"cert-manager-cainjector-7f985d654d-xjbqr\" (UID: \"fad5773f-9c6d-4050-bd1f-a3b02f66a1b5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.988989 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxbfj\" (UniqueName: \"kubernetes.io/projected/639e3dbe-9ddd-484e-917a-97c8e230d4b5-kube-api-access-bxbfj\") pod \"cert-manager-5b446d88c5-wldts\" (UID: \"639e3dbe-9ddd-484e-917a-97c8e230d4b5\") " pod="cert-manager/cert-manager-5b446d88c5-wldts" Sep 30 00:20:10 crc kubenswrapper[4809]: I0930 00:20:10.989088 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8rkk\" (UniqueName: \"kubernetes.io/projected/eef61867-7bb5-4d67-a7b8-8d370bda1e8d-kube-api-access-d8rkk\") pod \"cert-manager-webhook-5655c58dd6-sf4xj\" (UID: \"eef61867-7bb5-4d67-a7b8-8d370bda1e8d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.005402 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxbfj\" (UniqueName: \"kubernetes.io/projected/639e3dbe-9ddd-484e-917a-97c8e230d4b5-kube-api-access-bxbfj\") pod \"cert-manager-5b446d88c5-wldts\" (UID: 
\"639e3dbe-9ddd-484e-917a-97c8e230d4b5\") " pod="cert-manager/cert-manager-5b446d88c5-wldts" Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.005899 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8rkk\" (UniqueName: \"kubernetes.io/projected/eef61867-7bb5-4d67-a7b8-8d370bda1e8d-kube-api-access-d8rkk\") pod \"cert-manager-webhook-5655c58dd6-sf4xj\" (UID: \"eef61867-7bb5-4d67-a7b8-8d370bda1e8d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.073931 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.087162 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-wldts" Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.122956 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.344052 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wldts"] Sep 30 00:20:11 crc kubenswrapper[4809]: W0930 00:20:11.352503 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod639e3dbe_9ddd_484e_917a_97c8e230d4b5.slice/crio-5fd78482f431e7afe94d629ef18d9fdc30549db5909b1719e98a6016f1b102ae WatchSource:0}: Error finding container 5fd78482f431e7afe94d629ef18d9fdc30549db5909b1719e98a6016f1b102ae: Status 404 returned error can't find the container with id 5fd78482f431e7afe94d629ef18d9fdc30549db5909b1719e98a6016f1b102ae Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.606686 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-xjbqr"] Sep 30 00:20:11 crc kubenswrapper[4809]: W0930 00:20:11.608297 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfad5773f_9c6d_4050_bd1f_a3b02f66a1b5.slice/crio-a74167f80b076fa983eafff3e7aa996143c300f9b7cd144172284b6d2b848d12 WatchSource:0}: Error finding container a74167f80b076fa983eafff3e7aa996143c300f9b7cd144172284b6d2b848d12: Status 404 returned error can't find the container with id a74167f80b076fa983eafff3e7aa996143c300f9b7cd144172284b6d2b848d12 Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.610461 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-sf4xj"] Sep 30 00:20:11 crc kubenswrapper[4809]: W0930 00:20:11.611400 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeef61867_7bb5_4d67_a7b8_8d370bda1e8d.slice/crio-d99c58ea9b4a7f875ba8a3af2b8c3eddd95339cc2132926942215542a626963a WatchSource:0}: Error finding container d99c58ea9b4a7f875ba8a3af2b8c3eddd95339cc2132926942215542a626963a: Status 404 returned error can't find the container with id d99c58ea9b4a7f875ba8a3af2b8c3eddd95339cc2132926942215542a626963a Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.861440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" 
event={"ID":"eef61867-7bb5-4d67-a7b8-8d370bda1e8d","Type":"ContainerStarted","Data":"d99c58ea9b4a7f875ba8a3af2b8c3eddd95339cc2132926942215542a626963a"} Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.863250 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-wldts" event={"ID":"639e3dbe-9ddd-484e-917a-97c8e230d4b5","Type":"ContainerStarted","Data":"5fd78482f431e7afe94d629ef18d9fdc30549db5909b1719e98a6016f1b102ae"} Sep 30 00:20:11 crc kubenswrapper[4809]: I0930 00:20:11.864518 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" event={"ID":"fad5773f-9c6d-4050-bd1f-a3b02f66a1b5","Type":"ContainerStarted","Data":"a74167f80b076fa983eafff3e7aa996143c300f9b7cd144172284b6d2b848d12"} Sep 30 00:20:13 crc kubenswrapper[4809]: I0930 00:20:13.882345 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-wldts" event={"ID":"639e3dbe-9ddd-484e-917a-97c8e230d4b5","Type":"ContainerStarted","Data":"7066b223c5eff8a3d3d751c9f350615153f45f987557316a01b85dc22b9a813d"} Sep 30 00:20:13 crc kubenswrapper[4809]: I0930 00:20:13.904087 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-wldts" podStartSLOduration=1.745191208 podStartE2EDuration="3.90406567s" podCreationTimestamp="2025-09-30 00:20:10 +0000 UTC" firstStartedPulling="2025-09-30 00:20:11.355754737 +0000 UTC m=+662.392004145" lastFinishedPulling="2025-09-30 00:20:13.514629199 +0000 UTC m=+664.550878607" observedRunningTime="2025-09-30 00:20:13.899139006 +0000 UTC m=+664.935388414" watchObservedRunningTime="2025-09-30 00:20:13.90406567 +0000 UTC m=+664.940315078" Sep 30 00:20:16 crc kubenswrapper[4809]: I0930 00:20:16.906377 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" event={"ID":"fad5773f-9c6d-4050-bd1f-a3b02f66a1b5","Type":"ContainerStarted","Data":"58d543efabf7061e1cdebaa65350a9f63d99c4becee8306984d57ea3dc00b985"} Sep 30 00:20:16 crc kubenswrapper[4809]: I0930 00:20:16.909590 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" event={"ID":"eef61867-7bb5-4d67-a7b8-8d370bda1e8d","Type":"ContainerStarted","Data":"f7b755263b19d16eb4841a13e16cbf4022de2f0411297db6e1a38c13e1148f45"} Sep 30 00:20:16 crc kubenswrapper[4809]: I0930 00:20:16.909749 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:16 crc kubenswrapper[4809]: I0930 00:20:16.924995 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-xjbqr" podStartSLOduration=2.816588911 podStartE2EDuration="6.924950097s" podCreationTimestamp="2025-09-30 00:20:10 +0000 UTC" firstStartedPulling="2025-09-30 00:20:11.610124891 +0000 UTC m=+662.646374299" lastFinishedPulling="2025-09-30 00:20:15.718486027 +0000 UTC m=+666.754735485" observedRunningTime="2025-09-30 00:20:16.923578999 +0000 UTC m=+667.959828407" watchObservedRunningTime="2025-09-30 00:20:16.924950097 +0000 UTC m=+667.961199505" Sep 30 00:20:16 crc kubenswrapper[4809]: I0930 00:20:16.941906 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" podStartSLOduration=2.76915889 podStartE2EDuration="6.941880358s" podCreationTimestamp="2025-09-30 00:20:10 +0000 UTC" 
firstStartedPulling="2025-09-30 00:20:11.613218395 +0000 UTC m=+662.649467803" lastFinishedPulling="2025-09-30 00:20:15.785939863 +0000 UTC m=+666.822189271" observedRunningTime="2025-09-30 00:20:16.935423482 +0000 UTC m=+667.971672890" watchObservedRunningTime="2025-09-30 00:20:16.941880358 +0000 UTC m=+667.978129806" Sep 30 00:20:21 crc kubenswrapper[4809]: I0930 00:20:21.134318 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-sf4xj" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.698922 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt"] Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.700539 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.702093 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.718579 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt"] Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.874492 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.874556 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.874592 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x7wb\" (UniqueName: \"kubernetes.io/projected/86edf678-f512-4b77-830f-c3d098171d07-kube-api-access-4x7wb\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.907550 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72"] Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.908816 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.916574 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72"] Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.975522 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.975602 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.975674 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x7wb\" (UniqueName: \"kubernetes.io/projected/86edf678-f512-4b77-830f-c3d098171d07-kube-api-access-4x7wb\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.976121 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.976138 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:47 crc kubenswrapper[4809]: I0930 00:20:47.993226 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x7wb\" (UniqueName: \"kubernetes.io/projected/86edf678-f512-4b77-830f-c3d098171d07-kube-api-access-4x7wb\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.015683 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.076837 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.077092 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.077116 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjqss\" (UniqueName: \"kubernetes.io/projected/c9386292-91d5-4e1e-a63f-058cc8c88b9e-kube-api-access-xjqss\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.182302 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.182357 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjqss\" (UniqueName: \"kubernetes.io/projected/c9386292-91d5-4e1e-a63f-058cc8c88b9e-kube-api-access-xjqss\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.182399 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.182907 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.183107 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" 
(UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.200592 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjqss\" (UniqueName: \"kubernetes.io/projected/c9386292-91d5-4e1e-a63f-058cc8c88b9e-kube-api-access-xjqss\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.202787 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt"] Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.223879 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:48 crc kubenswrapper[4809]: I0930 00:20:48.411762 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72"] Sep 30 00:20:48 crc kubenswrapper[4809]: W0930 00:20:48.419072 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9386292_91d5_4e1e_a63f_058cc8c88b9e.slice/crio-c5499a9eb3cb619a4f61e224640ec32e17edf968644e615795c710d8fd959fad WatchSource:0}: Error finding container c5499a9eb3cb619a4f61e224640ec32e17edf968644e615795c710d8fd959fad: Status 404 returned error can't find the container with id c5499a9eb3cb619a4f61e224640ec32e17edf968644e615795c710d8fd959fad Sep 30 00:20:49 crc kubenswrapper[4809]: I0930 00:20:49.135188 4809 generic.go:334] "Generic (PLEG): container finished" podID="86edf678-f512-4b77-830f-c3d098171d07" containerID="55c3b6692627655e74aa56fe8f981238bcd4584000902fa4c7e8710570fc5dfa" exitCode=0 Sep 30 00:20:49 crc kubenswrapper[4809]: I0930 00:20:49.135291 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" event={"ID":"86edf678-f512-4b77-830f-c3d098171d07","Type":"ContainerDied","Data":"55c3b6692627655e74aa56fe8f981238bcd4584000902fa4c7e8710570fc5dfa"} Sep 30 00:20:49 crc kubenswrapper[4809]: I0930 00:20:49.135800 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" event={"ID":"86edf678-f512-4b77-830f-c3d098171d07","Type":"ContainerStarted","Data":"b16845fb52659086b0dd26debf7055dcf08f14032a257f6911b0717bf5c5e5c3"} Sep 30 00:20:49 crc kubenswrapper[4809]: I0930 00:20:49.140460 4809 generic.go:334] "Generic (PLEG): container finished" podID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerID="9a2da30ca93bc959ec67a62a8160ffd6b6447223f34c9ac5887fbf8e3d5004ed" exitCode=0 Sep 30 00:20:49 crc kubenswrapper[4809]: I0930 00:20:49.140540 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" 
event={"ID":"c9386292-91d5-4e1e-a63f-058cc8c88b9e","Type":"ContainerDied","Data":"9a2da30ca93bc959ec67a62a8160ffd6b6447223f34c9ac5887fbf8e3d5004ed"} Sep 30 00:20:49 crc kubenswrapper[4809]: I0930 00:20:49.140587 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" event={"ID":"c9386292-91d5-4e1e-a63f-058cc8c88b9e","Type":"ContainerStarted","Data":"c5499a9eb3cb619a4f61e224640ec32e17edf968644e615795c710d8fd959fad"} Sep 30 00:20:51 crc kubenswrapper[4809]: I0930 00:20:51.151428 4809 generic.go:334] "Generic (PLEG): container finished" podID="86edf678-f512-4b77-830f-c3d098171d07" containerID="ced435fda57a3ecfed9c55471c2884f8d6d0838222b235fe93ce77c7a69eb126" exitCode=0 Sep 30 00:20:51 crc kubenswrapper[4809]: I0930 00:20:51.151533 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" event={"ID":"86edf678-f512-4b77-830f-c3d098171d07","Type":"ContainerDied","Data":"ced435fda57a3ecfed9c55471c2884f8d6d0838222b235fe93ce77c7a69eb126"} Sep 30 00:20:51 crc kubenswrapper[4809]: I0930 00:20:51.153754 4809 generic.go:334] "Generic (PLEG): container finished" podID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerID="a10850b349c966f83851c0ebfea873bfccd5fa367ed1b7bf7c69c1831271634e" exitCode=0 Sep 30 00:20:51 crc kubenswrapper[4809]: I0930 00:20:51.153792 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" event={"ID":"c9386292-91d5-4e1e-a63f-058cc8c88b9e","Type":"ContainerDied","Data":"a10850b349c966f83851c0ebfea873bfccd5fa367ed1b7bf7c69c1831271634e"} Sep 30 00:20:52 crc kubenswrapper[4809]: I0930 00:20:52.160748 4809 generic.go:334] "Generic (PLEG): container finished" podID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerID="ef1aed4d710c9f39c725661f929c954ab2f740e6f39758ccdb1c6b0daa9fd745" exitCode=0 Sep 30 00:20:52 crc kubenswrapper[4809]: I0930 00:20:52.160838 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" event={"ID":"c9386292-91d5-4e1e-a63f-058cc8c88b9e","Type":"ContainerDied","Data":"ef1aed4d710c9f39c725661f929c954ab2f740e6f39758ccdb1c6b0daa9fd745"} Sep 30 00:20:52 crc kubenswrapper[4809]: I0930 00:20:52.163582 4809 generic.go:334] "Generic (PLEG): container finished" podID="86edf678-f512-4b77-830f-c3d098171d07" containerID="6bac4da5ad8016a531a214a6974762471fad4fa47bca8021c8c7b6a70cd7b59f" exitCode=0 Sep 30 00:20:52 crc kubenswrapper[4809]: I0930 00:20:52.163613 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" event={"ID":"86edf678-f512-4b77-830f-c3d098171d07","Type":"ContainerDied","Data":"6bac4da5ad8016a531a214a6974762471fad4fa47bca8021c8c7b6a70cd7b59f"} Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.441465 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.494496 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.557316 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-util\") pod \"86edf678-f512-4b77-830f-c3d098171d07\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.557397 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-bundle\") pod \"86edf678-f512-4b77-830f-c3d098171d07\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.557428 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x7wb\" (UniqueName: \"kubernetes.io/projected/86edf678-f512-4b77-830f-c3d098171d07-kube-api-access-4x7wb\") pod \"86edf678-f512-4b77-830f-c3d098171d07\" (UID: \"86edf678-f512-4b77-830f-c3d098171d07\") " Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.558693 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-bundle" (OuterVolumeSpecName: "bundle") pod "86edf678-f512-4b77-830f-c3d098171d07" (UID: "86edf678-f512-4b77-830f-c3d098171d07"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.564225 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86edf678-f512-4b77-830f-c3d098171d07-kube-api-access-4x7wb" (OuterVolumeSpecName: "kube-api-access-4x7wb") pod "86edf678-f512-4b77-830f-c3d098171d07" (UID: "86edf678-f512-4b77-830f-c3d098171d07"). InnerVolumeSpecName "kube-api-access-4x7wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.571492 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-util" (OuterVolumeSpecName: "util") pod "86edf678-f512-4b77-830f-c3d098171d07" (UID: "86edf678-f512-4b77-830f-c3d098171d07"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.658213 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjqss\" (UniqueName: \"kubernetes.io/projected/c9386292-91d5-4e1e-a63f-058cc8c88b9e-kube-api-access-xjqss\") pod \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.658603 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-util\") pod \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.658839 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-bundle\") pod \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\" (UID: \"c9386292-91d5-4e1e-a63f-058cc8c88b9e\") " Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.659184 4809 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.659204 4809 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/86edf678-f512-4b77-830f-c3d098171d07-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.659255 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x7wb\" (UniqueName: \"kubernetes.io/projected/86edf678-f512-4b77-830f-c3d098171d07-kube-api-access-4x7wb\") on node \"crc\" DevicePath \"\"" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.659912 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-bundle" (OuterVolumeSpecName: "bundle") pod "c9386292-91d5-4e1e-a63f-058cc8c88b9e" (UID: "c9386292-91d5-4e1e-a63f-058cc8c88b9e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.661029 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9386292-91d5-4e1e-a63f-058cc8c88b9e-kube-api-access-xjqss" (OuterVolumeSpecName: "kube-api-access-xjqss") pod "c9386292-91d5-4e1e-a63f-058cc8c88b9e" (UID: "c9386292-91d5-4e1e-a63f-058cc8c88b9e"). InnerVolumeSpecName "kube-api-access-xjqss". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.673170 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-util" (OuterVolumeSpecName: "util") pod "c9386292-91d5-4e1e-a63f-058cc8c88b9e" (UID: "c9386292-91d5-4e1e-a63f-058cc8c88b9e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.760439 4809 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.760492 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjqss\" (UniqueName: \"kubernetes.io/projected/c9386292-91d5-4e1e-a63f-058cc8c88b9e-kube-api-access-xjqss\") on node \"crc\" DevicePath \"\"" Sep 30 00:20:53 crc kubenswrapper[4809]: I0930 00:20:53.760507 4809 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c9386292-91d5-4e1e-a63f-058cc8c88b9e-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:20:54 crc kubenswrapper[4809]: I0930 00:20:54.178967 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" event={"ID":"c9386292-91d5-4e1e-a63f-058cc8c88b9e","Type":"ContainerDied","Data":"c5499a9eb3cb619a4f61e224640ec32e17edf968644e615795c710d8fd959fad"} Sep 30 00:20:54 crc kubenswrapper[4809]: I0930 00:20:54.179023 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5499a9eb3cb619a4f61e224640ec32e17edf968644e615795c710d8fd959fad" Sep 30 00:20:54 crc kubenswrapper[4809]: I0930 00:20:54.178987 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72" Sep 30 00:20:54 crc kubenswrapper[4809]: I0930 00:20:54.180818 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" event={"ID":"86edf678-f512-4b77-830f-c3d098171d07","Type":"ContainerDied","Data":"b16845fb52659086b0dd26debf7055dcf08f14032a257f6911b0717bf5c5e5c3"} Sep 30 00:20:54 crc kubenswrapper[4809]: I0930 00:20:54.180849 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b16845fb52659086b0dd26debf7055dcf08f14032a257f6911b0717bf5c5e5c3" Sep 30 00:20:54 crc kubenswrapper[4809]: I0930 00:20:54.180871 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt" Sep 30 00:20:55 crc kubenswrapper[4809]: I0930 00:20:55.325614 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:20:55 crc kubenswrapper[4809]: I0930 00:20:55.326419 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.384281 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m"] Sep 30 00:21:04 crc kubenswrapper[4809]: E0930 00:21:04.385032 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="pull" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385050 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="pull" Sep 30 00:21:04 crc kubenswrapper[4809]: E0930 00:21:04.385064 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="util" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385071 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="util" Sep 30 00:21:04 crc kubenswrapper[4809]: E0930 00:21:04.385082 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="extract" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385250 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="extract" Sep 30 00:21:04 crc kubenswrapper[4809]: E0930 00:21:04.385262 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="util" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385269 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="util" Sep 30 00:21:04 crc kubenswrapper[4809]: E0930 00:21:04.385281 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="pull" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385288 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="pull" Sep 30 00:21:04 crc kubenswrapper[4809]: E0930 00:21:04.385298 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="extract" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385305 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="extract" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385415 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9386292-91d5-4e1e-a63f-058cc8c88b9e" containerName="extract" Sep 30 00:21:04 crc 
kubenswrapper[4809]: I0930 00:21:04.385425 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="86edf678-f512-4b77-830f-c3d098171d07" containerName="extract" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.385999 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.387579 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.387578 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.387678 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.388181 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.388546 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-lhkgv" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.388847 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.404173 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m"] Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.491665 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-webhook-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.491709 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-manager-config\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.491730 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zdjv\" (UniqueName: \"kubernetes.io/projected/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-kube-api-access-8zdjv\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.491881 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: 
\"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.491973 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-apiservice-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.593301 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.593366 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-apiservice-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.593397 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-webhook-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.593414 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-manager-config\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.593434 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zdjv\" (UniqueName: \"kubernetes.io/projected/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-kube-api-access-8zdjv\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.594585 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-manager-config\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.599983 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-loki-operator-metrics-cert\") 
pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.600293 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-apiservice-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.603453 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-webhook-cert\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.612561 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zdjv\" (UniqueName: \"kubernetes.io/projected/cbc83e5f-ab0e-42be-96ed-efaa5f594ff7-kube-api-access-8zdjv\") pod \"loki-operator-controller-manager-6596b6db9-chq2m\" (UID: \"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:04 crc kubenswrapper[4809]: I0930 00:21:04.705165 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:05 crc kubenswrapper[4809]: I0930 00:21:05.184023 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m"] Sep 30 00:21:05 crc kubenswrapper[4809]: W0930 00:21:05.196373 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbc83e5f_ab0e_42be_96ed_efaa5f594ff7.slice/crio-0ab328e31361d125bf253bc30104159789b482d7be7c0c4461aa42db589814d5 WatchSource:0}: Error finding container 0ab328e31361d125bf253bc30104159789b482d7be7c0c4461aa42db589814d5: Status 404 returned error can't find the container with id 0ab328e31361d125bf253bc30104159789b482d7be7c0c4461aa42db589814d5 Sep 30 00:21:05 crc kubenswrapper[4809]: I0930 00:21:05.240919 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" event={"ID":"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7","Type":"ContainerStarted","Data":"0ab328e31361d125bf253bc30104159789b482d7be7c0c4461aa42db589814d5"} Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.331788 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-l9g4b"] Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.332522 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.335155 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.335293 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-tzpks" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.346590 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.361212 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-l9g4b"] Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.450463 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvjrs\" (UniqueName: \"kubernetes.io/projected/bfea1e6a-7933-4546-a9b5-1d16034b57a3-kube-api-access-xvjrs\") pod \"cluster-logging-operator-fcc886d58-l9g4b\" (UID: \"bfea1e6a-7933-4546-a9b5-1d16034b57a3\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.551270 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvjrs\" (UniqueName: \"kubernetes.io/projected/bfea1e6a-7933-4546-a9b5-1d16034b57a3-kube-api-access-xvjrs\") pod \"cluster-logging-operator-fcc886d58-l9g4b\" (UID: \"bfea1e6a-7933-4546-a9b5-1d16034b57a3\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.570240 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvjrs\" (UniqueName: \"kubernetes.io/projected/bfea1e6a-7933-4546-a9b5-1d16034b57a3-kube-api-access-xvjrs\") pod \"cluster-logging-operator-fcc886d58-l9g4b\" (UID: \"bfea1e6a-7933-4546-a9b5-1d16034b57a3\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" Sep 30 00:21:08 crc kubenswrapper[4809]: I0930 00:21:08.651985 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" Sep 30 00:21:10 crc kubenswrapper[4809]: I0930 00:21:10.180893 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-l9g4b"] Sep 30 00:21:10 crc kubenswrapper[4809]: I0930 00:21:10.271819 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" event={"ID":"bfea1e6a-7933-4546-a9b5-1d16034b57a3","Type":"ContainerStarted","Data":"6db2fac226bab21557664bb479f2881818429973798045ee16b2efa3cee4355d"} Sep 30 00:21:10 crc kubenswrapper[4809]: I0930 00:21:10.273033 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" event={"ID":"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7","Type":"ContainerStarted","Data":"201470d054273a45a5ea058edf9b8b465fe0fc4c9cf8519e434123469a49e903"} Sep 30 00:21:18 crc kubenswrapper[4809]: I0930 00:21:18.348128 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" event={"ID":"cbc83e5f-ab0e-42be-96ed-efaa5f594ff7","Type":"ContainerStarted","Data":"55fd539f08e79859d9347e131b274c2ab0a611eb4e7220d56cf6307a7ee8bb95"} Sep 30 00:21:18 crc kubenswrapper[4809]: I0930 00:21:18.348749 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:18 crc kubenswrapper[4809]: I0930 00:21:18.351118 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" Sep 30 00:21:18 crc kubenswrapper[4809]: I0930 00:21:18.351703 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" event={"ID":"bfea1e6a-7933-4546-a9b5-1d16034b57a3","Type":"ContainerStarted","Data":"ef363508104a8ba0029e5985eaaa3db8c1cb666afda69fd82666c0fae3e3eb8d"} Sep 30 00:21:18 crc kubenswrapper[4809]: I0930 00:21:18.409856 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-6596b6db9-chq2m" podStartSLOduration=1.474320974 podStartE2EDuration="14.409840049s" podCreationTimestamp="2025-09-30 00:21:04 +0000 UTC" firstStartedPulling="2025-09-30 00:21:05.198417656 +0000 UTC m=+716.234667064" lastFinishedPulling="2025-09-30 00:21:18.133936721 +0000 UTC m=+729.170186139" observedRunningTime="2025-09-30 00:21:18.383567655 +0000 UTC m=+729.419817083" watchObservedRunningTime="2025-09-30 00:21:18.409840049 +0000 UTC m=+729.446089457" Sep 30 00:21:18 crc kubenswrapper[4809]: I0930 00:21:18.412306 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-fcc886d58-l9g4b" podStartSLOduration=2.466581202 podStartE2EDuration="10.412298855s" podCreationTimestamp="2025-09-30 00:21:08 +0000 UTC" firstStartedPulling="2025-09-30 00:21:10.18715961 +0000 UTC m=+721.223409018" lastFinishedPulling="2025-09-30 00:21:18.132877263 +0000 UTC m=+729.169126671" observedRunningTime="2025-09-30 00:21:18.407265079 +0000 UTC m=+729.443514487" watchObservedRunningTime="2025-09-30 00:21:18.412298855 +0000 UTC m=+729.448548263" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.728808 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Sep 30 00:21:23 crc 
kubenswrapper[4809]: I0930 00:21:23.730172 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.733651 4809 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-wn245" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.733899 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.734076 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.734137 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.744619 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") " pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.744808 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdqps\" (UniqueName: \"kubernetes.io/projected/1294ef69-db6e-4b07-8d87-3138e740d3c4-kube-api-access-rdqps\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") " pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.846272 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdqps\" (UniqueName: \"kubernetes.io/projected/1294ef69-db6e-4b07-8d87-3138e740d3c4-kube-api-access-rdqps\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") " pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.846609 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") " pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.850020 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.850053 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e20a6c405b5e81769d19e73c20cfc9132d45022cc55a840fea63ef5c1459af7b/globalmount\"" pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.866026 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdqps\" (UniqueName: \"kubernetes.io/projected/1294ef69-db6e-4b07-8d87-3138e740d3c4-kube-api-access-rdqps\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") " pod="minio-dev/minio" Sep 30 00:21:23 crc kubenswrapper[4809]: I0930 00:21:23.879297 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06d15a48-7af0-4785-99a4-0b68e763d18b\") pod \"minio\" (UID: \"1294ef69-db6e-4b07-8d87-3138e740d3c4\") " pod="minio-dev/minio" Sep 30 00:21:24 crc kubenswrapper[4809]: I0930 00:21:24.051153 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Sep 30 00:21:24 crc kubenswrapper[4809]: I0930 00:21:24.257260 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Sep 30 00:21:24 crc kubenswrapper[4809]: I0930 00:21:24.389797 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"1294ef69-db6e-4b07-8d87-3138e740d3c4","Type":"ContainerStarted","Data":"2e7847cf03e18c33b527778a7ea1c46f8e2757e6e9e9b39a25de13c04bf1794f"} Sep 30 00:21:25 crc kubenswrapper[4809]: I0930 00:21:25.325312 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:21:25 crc kubenswrapper[4809]: I0930 00:21:25.325368 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:21:28 crc kubenswrapper[4809]: I0930 00:21:28.419603 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"1294ef69-db6e-4b07-8d87-3138e740d3c4","Type":"ContainerStarted","Data":"21388837377e9c506836260c5733dbf3a2a147974b65fd2505577d5c3e0b7a8f"} Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.415256 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=8.340164452 podStartE2EDuration="11.415240048s" podCreationTimestamp="2025-09-30 00:21:21 +0000 UTC" firstStartedPulling="2025-09-30 00:21:24.269975205 +0000 UTC m=+735.306224613" lastFinishedPulling="2025-09-30 00:21:27.345050801 +0000 UTC m=+738.381300209" observedRunningTime="2025-09-30 00:21:28.435747564 +0000 UTC m=+739.471996972" watchObservedRunningTime="2025-09-30 00:21:32.415240048 +0000 UTC m=+743.451489456" Sep 30 00:21:32 crc 
kubenswrapper[4809]: I0930 00:21:32.416670 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.417355 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.419293 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-ca-bundle" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.419436 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-dockercfg-km74f" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.419561 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-config" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.419688 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-grpc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.419791 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-http" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.438223 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.469390 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz87s\" (UniqueName: \"kubernetes.io/projected/70ea64fe-b6ed-460c-b37e-fb80fe0420af-kube-api-access-pz87s\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.469666 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-distributor-http\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.469750 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70ea64fe-b6ed-460c-b37e-fb80fe0420af-config\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.469821 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.469953 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-ca-bundle\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.571236 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-distributor-http\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.571478 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70ea64fe-b6ed-460c-b37e-fb80fe0420af-config\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.571550 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.571694 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-ca-bundle\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.571788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz87s\" (UniqueName: \"kubernetes.io/projected/70ea64fe-b6ed-460c-b37e-fb80fe0420af-kube-api-access-pz87s\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.588085 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.588164 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-ca-bundle\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.588227 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70ea64fe-b6ed-460c-b37e-fb80fe0420af-config\") pod 
\"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.588539 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/70ea64fe-b6ed-460c-b37e-fb80fe0420af-logging-loki-distributor-http\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.625521 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-querier-7454676c57-jl2gl"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.626175 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.629099 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-http" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.629301 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-grpc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.629407 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-s3" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.645258 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-7454676c57-jl2gl"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.667609 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz87s\" (UniqueName: \"kubernetes.io/projected/70ea64fe-b6ed-460c-b37e-fb80fe0420af-kube-api-access-pz87s\") pod \"logging-loki-distributor-67c9b4c785-5tqn7\" (UID: \"70ea64fe-b6ed-460c-b37e-fb80fe0420af\") " pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.673522 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-querier-http\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.673576 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-querier-grpc\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.673594 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-ca-bundle\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.673612 4809 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d4fd159-6ed4-43b0-a478-86029c5648b6-config\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.673790 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfvzb\" (UniqueName: \"kubernetes.io/projected/5d4fd159-6ed4-43b0-a478-86029c5648b6-kube-api-access-pfvzb\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.673860 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-s3\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.726875 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.727710 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.732097 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-grpc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.732343 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-http" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.732805 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.744219 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.775980 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-querier-http\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776708 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-config\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776757 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-querier-grpc\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776781 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-ca-bundle\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776803 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d4fd159-6ed4-43b0-a478-86029c5648b6-config\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776835 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfvzb\" (UniqueName: \"kubernetes.io/projected/5d4fd159-6ed4-43b0-a478-86029c5648b6-kube-api-access-pfvzb\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776920 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776951 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: 
\"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-s3\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.776981 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.777031 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.777057 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfqv5\" (UniqueName: \"kubernetes.io/projected/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-kube-api-access-dfqv5\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.777701 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-ca-bundle\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.780190 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-querier-http\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.781698 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d4fd159-6ed4-43b0-a478-86029c5648b6-config\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.782065 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-s3\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.821760 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-575dfb8665-4fwzc"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.822649 4809 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.827010 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-http" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.827475 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.827503 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-client-http" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.827631 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway-ca-bundle" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.827997 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.834594 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-575dfb8665-dnzjd"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.835491 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.840736 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-dockercfg-8sdc4" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.845501 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-575dfb8665-4fwzc"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.861972 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-575dfb8665-dnzjd"] Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879084 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-config\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879149 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tenants\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879179 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-lokistack-gateway\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879198 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-query-frontend-http\") pod 
\"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879220 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879279 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tls-secret\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879305 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879330 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-lokistack-gateway\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.879350 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.880753 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.880859 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.880932 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfqv5\" (UniqueName: 
\"kubernetes.io/projected/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-kube-api-access-dfqv5\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881006 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc59g\" (UniqueName: \"kubernetes.io/projected/6f1ca034-50e2-49d4-ba08-52e3d91d463c-kube-api-access-rc59g\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881083 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tls-secret\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881149 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-rbac\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881215 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-rbac\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881301 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881379 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htws8\" (UniqueName: \"kubernetes.io/projected/694ba803-44df-4e1d-9236-c84411352efe-kube-api-access-htws8\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881460 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881531 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: 
\"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.881618 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tenants\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.922205 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5d4fd159-6ed4-43b0-a478-86029c5648b6-logging-loki-querier-grpc\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.925398 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-config\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.925722 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.927512 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.927859 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfqv5\" (UniqueName: \"kubernetes.io/projected/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-kube-api-access-dfqv5\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.929357 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfvzb\" (UniqueName: \"kubernetes.io/projected/5d4fd159-6ed4-43b0-a478-86029c5648b6-kube-api-access-pfvzb\") pod \"logging-loki-querier-7454676c57-jl2gl\" (UID: \"5d4fd159-6ed4-43b0-a478-86029c5648b6\") " pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.946477 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.982851 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tenants\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.982915 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-lokistack-gateway\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.982947 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.982967 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tls-secret\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983002 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-lokistack-gateway\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983034 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983067 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983089 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc59g\" (UniqueName: \"kubernetes.io/projected/6f1ca034-50e2-49d4-ba08-52e3d91d463c-kube-api-access-rc59g\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983112 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tls-secret\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983133 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-rbac\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983159 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-rbac\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983188 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htws8\" (UniqueName: \"kubernetes.io/projected/694ba803-44df-4e1d-9236-c84411352efe-kube-api-access-htws8\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: E0930 00:21:32.983198 4809 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Sep 30 00:21:32 crc kubenswrapper[4809]: E0930 00:21:32.983290 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tls-secret podName:6f1ca034-50e2-49d4-ba08-52e3d91d463c nodeName:}" failed. No retries permitted until 2025-09-30 00:21:33.483264778 +0000 UTC m=+744.519514266 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tls-secret") pod "logging-loki-gateway-575dfb8665-4fwzc" (UID: "6f1ca034-50e2-49d4-ba08-52e3d91d463c") : secret "logging-loki-gateway-http" not found Sep 30 00:21:32 crc kubenswrapper[4809]: E0930 00:21:32.983551 4809 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Sep 30 00:21:32 crc kubenswrapper[4809]: E0930 00:21:32.983618 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tls-secret podName:694ba803-44df-4e1d-9236-c84411352efe nodeName:}" failed. No retries permitted until 2025-09-30 00:21:33.483595607 +0000 UTC m=+744.519845095 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tls-secret") pod "logging-loki-gateway-575dfb8665-dnzjd" (UID: "694ba803-44df-4e1d-9236-c84411352efe") : secret "logging-loki-gateway-http" not found Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.983212 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.984873 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.984991 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:32 crc kubenswrapper[4809]: I0930 00:21:32.985075 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tenants\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.094180 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/a29afcf0-0d05-4bbc-9f7a-4258b35c43f8-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-6b467cdd84-59gpr\" (UID: \"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8\") " pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.113671 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.113722 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.114084 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: 
\"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-rbac\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.114165 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/6f1ca034-50e2-49d4-ba08-52e3d91d463c-lokistack-gateway\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.114312 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.114335 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-lokistack-gateway\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.115254 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.115923 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/694ba803-44df-4e1d-9236-c84411352efe-rbac\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.116482 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tenants\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.117656 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc59g\" (UniqueName: \"kubernetes.io/projected/6f1ca034-50e2-49d4-ba08-52e3d91d463c-kube-api-access-rc59g\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.117877 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" 
Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.118188 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tenants\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.118870 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htws8\" (UniqueName: \"kubernetes.io/projected/694ba803-44df-4e1d-9236-c84411352efe-kube-api-access-htws8\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.120290 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.347959 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.385082 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.493351 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tls-secret\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.493415 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tls-secret\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.511429 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/694ba803-44df-4e1d-9236-c84411352efe-tls-secret\") pod \"logging-loki-gateway-575dfb8665-dnzjd\" (UID: \"694ba803-44df-4e1d-9236-c84411352efe\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.513244 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/6f1ca034-50e2-49d4-ba08-52e3d91d463c-tls-secret\") pod \"logging-loki-gateway-575dfb8665-4fwzc\" (UID: \"6f1ca034-50e2-49d4-ba08-52e3d91d463c\") " pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.526043 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-7454676c57-jl2gl"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.526337 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.526986 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.638949 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.644954 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.647244 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-grpc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.654912 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-http" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.656948 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.697814 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.717392 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.718198 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.723131 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-http" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.728076 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-grpc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.745717 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.752786 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.787173 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.788291 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.790813 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.791550 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-http" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.791755 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-grpc" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.816268 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.816984 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.817051 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c9377566-1f17-4339-8796-087648cd7b7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c9377566-1f17-4339-8796-087648cd7b7b\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.817114 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.817151 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rksgb\" (UniqueName: \"kubernetes.io/projected/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-kube-api-access-rksgb\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.817185 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.817209 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-config\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " 
pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.817229 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.821892 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.821937 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bbf478a159a36288ba8d3743f96913b49fd0eb152896988c7da98d551fad7570/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.864568 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12d4eb83-62f9-4896-8021-ca5ae3795f04\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.899693 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-575dfb8665-4fwzc"] Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918197 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38aa55e1-d827-448d-bf0f-11732b946db1-config\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918260 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgm7c\" (UniqueName: \"kubernetes.io/projected/38aa55e1-d827-448d-bf0f-11732b946db1-kube-api-access-pgm7c\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918343 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918399 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rksgb\" (UniqueName: \"kubernetes.io/projected/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-kube-api-access-rksgb\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 
30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918421 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918471 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918494 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918529 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918557 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-config\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918588 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918625 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918689 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b4742950-8ae2-4d41-b137-1f699ec95742\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b4742950-8ae2-4d41-b137-1f699ec95742\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918714 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: 
\"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918757 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918792 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918819 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlk7x\" (UniqueName: \"kubernetes.io/projected/2b167b44-b57b-43fe-b959-68c6d9a8dc78-kube-api-access-nlk7x\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918837 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-03872c94-e082-4021-8766-9c72b0a79cc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03872c94-e082-4021-8766-9c72b0a79cc1\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918858 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c9377566-1f17-4339-8796-087648cd7b7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c9377566-1f17-4339-8796-087648cd7b7b\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918926 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918969 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b167b44-b57b-43fe-b959-68c6d9a8dc78-config\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.918995 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " 
pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.919916 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-config\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.923073 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.923574 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.924257 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.924277 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.924401 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.924429 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c9377566-1f17-4339-8796-087648cd7b7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c9377566-1f17-4339-8796-087648cd7b7b\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bdee779c221d272ac905284e95f5dd30c3ca48e702263d27b62b899377e37896/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.935238 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rksgb\" (UniqueName: \"kubernetes.io/projected/f51c82f4-e2f6-4c00-8132-482f9d5b1e90-kube-api-access-rksgb\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.945710 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c9377566-1f17-4339-8796-087648cd7b7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c9377566-1f17-4339-8796-087648cd7b7b\") pod \"logging-loki-ingester-0\" (UID: \"f51c82f4-e2f6-4c00-8132-482f9d5b1e90\") " pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:33 crc kubenswrapper[4809]: I0930 00:21:33.981505 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.019996 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b4742950-8ae2-4d41-b137-1f699ec95742\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b4742950-8ae2-4d41-b137-1f699ec95742\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020066 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020115 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020139 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlk7x\" (UniqueName: \"kubernetes.io/projected/2b167b44-b57b-43fe-b959-68c6d9a8dc78-kube-api-access-nlk7x\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020163 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-03872c94-e082-4021-8766-9c72b0a79cc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03872c94-e082-4021-8766-9c72b0a79cc1\") pod 
\"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020201 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020232 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b167b44-b57b-43fe-b959-68c6d9a8dc78-config\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020262 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020299 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38aa55e1-d827-448d-bf0f-11732b946db1-config\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020319 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgm7c\" (UniqueName: \"kubernetes.io/projected/38aa55e1-d827-448d-bf0f-11732b946db1-kube-api-access-pgm7c\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020343 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.020376 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.021414 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.021625 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2b167b44-b57b-43fe-b959-68c6d9a8dc78-config\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.021836 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38aa55e1-d827-448d-bf0f-11732b946db1-config\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.021849 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.022339 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.022380 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.023276 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.023630 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.025479 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.025516 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b4742950-8ae2-4d41-b137-1f699ec95742\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b4742950-8ae2-4d41-b137-1f699ec95742\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fc8a1f28ab73b9a3b759bdbdb396f1ea3cc6345949a422b808be49a63092ef56/globalmount\"" pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.025740 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.025886 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.026286 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/38aa55e1-d827-448d-bf0f-11732b946db1-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.026282 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/2b167b44-b57b-43fe-b959-68c6d9a8dc78-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.026596 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.026618 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-03872c94-e082-4021-8766-9c72b0a79cc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03872c94-e082-4021-8766-9c72b0a79cc1\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f403ebfa12b78612d0372edda688797f8678a6c9740304c7d8fa12e191ef3b07/globalmount\"" pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.038790 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlk7x\" (UniqueName: \"kubernetes.io/projected/2b167b44-b57b-43fe-b959-68c6d9a8dc78-kube-api-access-nlk7x\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.049178 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgm7c\" (UniqueName: \"kubernetes.io/projected/38aa55e1-d827-448d-bf0f-11732b946db1-kube-api-access-pgm7c\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.053493 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-03872c94-e082-4021-8766-9c72b0a79cc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03872c94-e082-4021-8766-9c72b0a79cc1\") pod \"logging-loki-index-gateway-0\" (UID: \"38aa55e1-d827-448d-bf0f-11732b946db1\") " pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.071384 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b4742950-8ae2-4d41-b137-1f699ec95742\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b4742950-8ae2-4d41-b137-1f699ec95742\") pod \"logging-loki-compactor-0\" (UID: \"2b167b44-b57b-43fe-b959-68c6d9a8dc78\") " pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.089075 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-575dfb8665-dnzjd"] Sep 30 00:21:34 crc kubenswrapper[4809]: W0930 00:21:34.096794 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod694ba803_44df_4e1d_9236_c84411352efe.slice/crio-32cfd791fc482b46774fe191aac5c1e6025142bc7d56402a2cdd8af58a092b41 WatchSource:0}: Error finding container 32cfd791fc482b46774fe191aac5c1e6025142bc7d56402a2cdd8af58a092b41: Status 404 returned error can't find the container with id 32cfd791fc482b46774fe191aac5c1e6025142bc7d56402a2cdd8af58a092b41 Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.106078 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.307254 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Sep 30 00:21:34 crc kubenswrapper[4809]: W0930 00:21:34.311155 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38aa55e1_d827_448d_bf0f_11732b946db1.slice/crio-1047ea3648de032d7ea2d9c7238309143839edbc776d6c4bb8c647bd29d54f6c WatchSource:0}: Error finding container 1047ea3648de032d7ea2d9c7238309143839edbc776d6c4bb8c647bd29d54f6c: Status 404 returned error can't find the container with id 1047ea3648de032d7ea2d9c7238309143839edbc776d6c4bb8c647bd29d54f6c Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.374534 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.384026 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Sep 30 00:21:34 crc kubenswrapper[4809]: W0930 00:21:34.388686 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf51c82f4_e2f6_4c00_8132_482f9d5b1e90.slice/crio-8ac7639fad1cf358cde58534425741c5181bb46d7e851c55465c8da0e1e51198 WatchSource:0}: Error finding container 8ac7639fad1cf358cde58534425741c5181bb46d7e851c55465c8da0e1e51198: Status 404 returned error can't find the container with id 8ac7639fad1cf358cde58534425741c5181bb46d7e851c55465c8da0e1e51198 Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.460625 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" event={"ID":"70ea64fe-b6ed-460c-b37e-fb80fe0420af","Type":"ContainerStarted","Data":"5dd77e505afb5c88b137e79a0c449c50720a8117b2b366032180fbc34ffd1b55"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.462676 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"38aa55e1-d827-448d-bf0f-11732b946db1","Type":"ContainerStarted","Data":"1047ea3648de032d7ea2d9c7238309143839edbc776d6c4bb8c647bd29d54f6c"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.463958 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" event={"ID":"5d4fd159-6ed4-43b0-a478-86029c5648b6","Type":"ContainerStarted","Data":"009270102a6438594777b881ebc6b519d7564a481b4cddf0e934725bcdf0c881"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.464869 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" event={"ID":"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8","Type":"ContainerStarted","Data":"ce09002b7e9e39fed027b95dc6eabdf2773e9a70581da933e02678c9c254d0d5"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.466273 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" event={"ID":"694ba803-44df-4e1d-9236-c84411352efe","Type":"ContainerStarted","Data":"32cfd791fc482b46774fe191aac5c1e6025142bc7d56402a2cdd8af58a092b41"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.467163 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" 
event={"ID":"6f1ca034-50e2-49d4-ba08-52e3d91d463c","Type":"ContainerStarted","Data":"1012f977aefbb45f14c3eefe4a3066e6dda41f717f3eb4ee8c1cf06039ad8686"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.468149 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"f51c82f4-e2f6-4c00-8132-482f9d5b1e90","Type":"ContainerStarted","Data":"8ac7639fad1cf358cde58534425741c5181bb46d7e851c55465c8da0e1e51198"} Sep 30 00:21:34 crc kubenswrapper[4809]: I0930 00:21:34.766286 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Sep 30 00:21:34 crc kubenswrapper[4809]: W0930 00:21:34.783777 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b167b44_b57b_43fe_b959_68c6d9a8dc78.slice/crio-b238e19abafa35729442ac55a727db815110f0db5d26665dabf0678df39fbf25 WatchSource:0}: Error finding container b238e19abafa35729442ac55a727db815110f0db5d26665dabf0678df39fbf25: Status 404 returned error can't find the container with id b238e19abafa35729442ac55a727db815110f0db5d26665dabf0678df39fbf25 Sep 30 00:21:35 crc kubenswrapper[4809]: I0930 00:21:35.478859 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"2b167b44-b57b-43fe-b959-68c6d9a8dc78","Type":"ContainerStarted","Data":"b238e19abafa35729442ac55a727db815110f0db5d26665dabf0678df39fbf25"} Sep 30 00:21:36 crc kubenswrapper[4809]: I0930 00:21:36.949061 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgzpz"] Sep 30 00:21:36 crc kubenswrapper[4809]: I0930 00:21:36.949275 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" containerID="cri-o://0472183cf47259f6240250057f152f1e3c6b34d9b78ccd9bebcc4a80d5c2c38a" gracePeriod=30 Sep 30 00:21:36 crc kubenswrapper[4809]: I0930 00:21:36.964316 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq"] Sep 30 00:21:36 crc kubenswrapper[4809]: I0930 00:21:36.964721 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerName="route-controller-manager" containerID="cri-o://952f0a909c48c771bd482b6b592c00985ea19a27b08fbb0ade5472bd962f80e1" gracePeriod=30 Sep 30 00:21:37 crc kubenswrapper[4809]: I0930 00:21:37.507546 4809 generic.go:334] "Generic (PLEG): container finished" podID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerID="952f0a909c48c771bd482b6b592c00985ea19a27b08fbb0ade5472bd962f80e1" exitCode=0 Sep 30 00:21:37 crc kubenswrapper[4809]: I0930 00:21:37.507841 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" event={"ID":"5211fa59-5683-47eb-bfa9-100d466cd1d5","Type":"ContainerDied","Data":"952f0a909c48c771bd482b6b592c00985ea19a27b08fbb0ade5472bd962f80e1"} Sep 30 00:21:37 crc kubenswrapper[4809]: I0930 00:21:37.509865 4809 generic.go:334] "Generic (PLEG): container finished" podID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerID="0472183cf47259f6240250057f152f1e3c6b34d9b78ccd9bebcc4a80d5c2c38a" exitCode=0 
Sep 30 00:21:37 crc kubenswrapper[4809]: I0930 00:21:37.509906 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" event={"ID":"d8339f60-b243-4c2e-b416-86ef26c104a0","Type":"ContainerDied","Data":"0472183cf47259f6240250057f152f1e3c6b34d9b78ccd9bebcc4a80d5c2c38a"} Sep 30 00:21:39 crc kubenswrapper[4809]: I0930 00:21:39.581584 4809 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lgzpz container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 30 00:21:39 crc kubenswrapper[4809]: I0930 00:21:39.582009 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 30 00:21:39 crc kubenswrapper[4809]: I0930 00:21:39.788272 4809 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-pzxnq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Sep 30 00:21:39 crc kubenswrapper[4809]: I0930 00:21:39.788332 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Sep 30 00:21:41 crc kubenswrapper[4809]: I0930 00:21:41.117822 4809 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.842328 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.885245 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5478479c55-g2sf9"] Sep 30 00:21:44 crc kubenswrapper[4809]: E0930 00:21:44.885593 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.885616 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.885774 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" containerName="controller-manager" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.886467 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.889178 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5478479c55-g2sf9"] Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.917908 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-proxy-ca-bundles\") pod \"d8339f60-b243-4c2e-b416-86ef26c104a0\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918023 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-client-ca\") pod \"d8339f60-b243-4c2e-b416-86ef26c104a0\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918077 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgh4n\" (UniqueName: \"kubernetes.io/projected/d8339f60-b243-4c2e-b416-86ef26c104a0-kube-api-access-kgh4n\") pod \"d8339f60-b243-4c2e-b416-86ef26c104a0\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918112 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8339f60-b243-4c2e-b416-86ef26c104a0-serving-cert\") pod \"d8339f60-b243-4c2e-b416-86ef26c104a0\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918170 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-config\") pod \"d8339f60-b243-4c2e-b416-86ef26c104a0\" (UID: \"d8339f60-b243-4c2e-b416-86ef26c104a0\") " Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918588 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-config\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918635 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0238339d-30a9-4978-b9de-890e163c27a5-serving-cert\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918693 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-proxy-ca-bundles\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918746 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8nhx\" (UniqueName: 
\"kubernetes.io/projected/0238339d-30a9-4978-b9de-890e163c27a5-kube-api-access-q8nhx\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918795 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-client-ca\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.918859 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d8339f60-b243-4c2e-b416-86ef26c104a0" (UID: "d8339f60-b243-4c2e-b416-86ef26c104a0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.919775 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-config" (OuterVolumeSpecName: "config") pod "d8339f60-b243-4c2e-b416-86ef26c104a0" (UID: "d8339f60-b243-4c2e-b416-86ef26c104a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.920115 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-client-ca" (OuterVolumeSpecName: "client-ca") pod "d8339f60-b243-4c2e-b416-86ef26c104a0" (UID: "d8339f60-b243-4c2e-b416-86ef26c104a0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.927633 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8339f60-b243-4c2e-b416-86ef26c104a0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d8339f60-b243-4c2e-b416-86ef26c104a0" (UID: "d8339f60-b243-4c2e-b416-86ef26c104a0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:21:44 crc kubenswrapper[4809]: I0930 00:21:44.928445 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8339f60-b243-4c2e-b416-86ef26c104a0-kube-api-access-kgh4n" (OuterVolumeSpecName: "kube-api-access-kgh4n") pod "d8339f60-b243-4c2e-b416-86ef26c104a0" (UID: "d8339f60-b243-4c2e-b416-86ef26c104a0"). InnerVolumeSpecName "kube-api-access-kgh4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020632 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-proxy-ca-bundles\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020715 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8nhx\" (UniqueName: \"kubernetes.io/projected/0238339d-30a9-4978-b9de-890e163c27a5-kube-api-access-q8nhx\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020759 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-client-ca\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020793 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-config\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020815 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0238339d-30a9-4978-b9de-890e163c27a5-serving-cert\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020860 4809 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020870 4809 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020880 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgh4n\" (UniqueName: \"kubernetes.io/projected/d8339f60-b243-4c2e-b416-86ef26c104a0-kube-api-access-kgh4n\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020889 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8339f60-b243-4c2e-b416-86ef26c104a0-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.020898 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8339f60-b243-4c2e-b416-86ef26c104a0-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.022381 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-client-ca\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.022629 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-proxy-ca-bundles\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.023630 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0238339d-30a9-4978-b9de-890e163c27a5-config\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.024936 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0238339d-30a9-4978-b9de-890e163c27a5-serving-cert\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.037616 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8nhx\" (UniqueName: \"kubernetes.io/projected/0238339d-30a9-4978-b9de-890e163c27a5-kube-api-access-q8nhx\") pod \"controller-manager-5478479c55-g2sf9\" (UID: \"0238339d-30a9-4978-b9de-890e163c27a5\") " pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.072721 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.122014 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5211fa59-5683-47eb-bfa9-100d466cd1d5-serving-cert\") pod \"5211fa59-5683-47eb-bfa9-100d466cd1d5\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.122078 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8c94\" (UniqueName: \"kubernetes.io/projected/5211fa59-5683-47eb-bfa9-100d466cd1d5-kube-api-access-q8c94\") pod \"5211fa59-5683-47eb-bfa9-100d466cd1d5\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.122223 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-config\") pod \"5211fa59-5683-47eb-bfa9-100d466cd1d5\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.122262 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-client-ca\") pod \"5211fa59-5683-47eb-bfa9-100d466cd1d5\" (UID: \"5211fa59-5683-47eb-bfa9-100d466cd1d5\") " Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.122951 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-client-ca" (OuterVolumeSpecName: "client-ca") pod "5211fa59-5683-47eb-bfa9-100d466cd1d5" (UID: "5211fa59-5683-47eb-bfa9-100d466cd1d5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.123125 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-config" (OuterVolumeSpecName: "config") pod "5211fa59-5683-47eb-bfa9-100d466cd1d5" (UID: "5211fa59-5683-47eb-bfa9-100d466cd1d5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.123371 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.123410 4809 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5211fa59-5683-47eb-bfa9-100d466cd1d5-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.125676 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5211fa59-5683-47eb-bfa9-100d466cd1d5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5211fa59-5683-47eb-bfa9-100d466cd1d5" (UID: "5211fa59-5683-47eb-bfa9-100d466cd1d5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.125782 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5211fa59-5683-47eb-bfa9-100d466cd1d5-kube-api-access-q8c94" (OuterVolumeSpecName: "kube-api-access-q8c94") pod "5211fa59-5683-47eb-bfa9-100d466cd1d5" (UID: "5211fa59-5683-47eb-bfa9-100d466cd1d5"). InnerVolumeSpecName "kube-api-access-q8c94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.208435 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.228361 4809 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5211fa59-5683-47eb-bfa9-100d466cd1d5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.228405 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8c94\" (UniqueName: \"kubernetes.io/projected/5211fa59-5683-47eb-bfa9-100d466cd1d5-kube-api-access-q8c94\") on node \"crc\" DevicePath \"\"" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.567688 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" event={"ID":"694ba803-44df-4e1d-9236-c84411352efe","Type":"ContainerStarted","Data":"bab2e6cb4bc04e743810006a9b6457b9881e414c79fb5708009f4b7bd326af63"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.575053 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.575047 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq" event={"ID":"5211fa59-5683-47eb-bfa9-100d466cd1d5","Type":"ContainerDied","Data":"d513d62e273c636b752d0c090b68dad3434908eb3d3d7bfeeb50859a980783bf"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.575207 4809 scope.go:117] "RemoveContainer" containerID="952f0a909c48c771bd482b6b592c00985ea19a27b08fbb0ade5472bd962f80e1" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.581388 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" event={"ID":"d8339f60-b243-4c2e-b416-86ef26c104a0","Type":"ContainerDied","Data":"2ab3164815907d3a147737f4d4534c6bfea471b452e3a440dd6f481c8647f392"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.581486 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgzpz" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.588899 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"2b167b44-b57b-43fe-b959-68c6d9a8dc78","Type":"ContainerStarted","Data":"b80cb1f6d0fc2cf4ecc37629097a4dcb75fc4be21e4bd078b31ce707588ae3f2"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.588952 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.595108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" event={"ID":"a29afcf0-0d05-4bbc-9f7a-4258b35c43f8","Type":"ContainerStarted","Data":"b7efa8ca6142ee52692e4054bad9717d647d3fe06c78aab6c1a3c6b62c7a61db"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.595263 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.596962 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"f51c82f4-e2f6-4c00-8132-482f9d5b1e90","Type":"ContainerStarted","Data":"7ef432ceb6b1374f1ce90c22b3074112542db29e9854f9ae6679007bf4f83926"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.597459 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.598789 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"38aa55e1-d827-448d-bf0f-11732b946db1","Type":"ContainerStarted","Data":"fdfa414e71f61231fabe4ed7cf59e3715c24b9821dea5f1e3c2a492b020f7f81"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.601821 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.603546 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" event={"ID":"5d4fd159-6ed4-43b0-a478-86029c5648b6","Type":"ContainerStarted","Data":"e1ca35078c2831d7bc6c8fcb08b521d2397c434f557d4e516ee0a42468010f51"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.604188 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.605830 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" event={"ID":"6f1ca034-50e2-49d4-ba08-52e3d91d463c","Type":"ContainerStarted","Data":"69e17cad3b45e0c282234fc2063923487f44b4ac2fe8a0cf41c9f5fa2f7843ad"} Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.618475 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-compactor-0" podStartSLOduration=3.19103471 podStartE2EDuration="13.61845313s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:34.788890119 +0000 UTC m=+745.825139527" lastFinishedPulling="2025-09-30 00:21:45.216308499 +0000 UTC m=+756.252557947" observedRunningTime="2025-09-30 00:21:45.610764405 +0000 UTC m=+756.647013813" 
watchObservedRunningTime="2025-09-30 00:21:45.61845313 +0000 UTC m=+756.654702538" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.627290 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" podStartSLOduration=1.927853563 podStartE2EDuration="13.627271017s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:33.535383661 +0000 UTC m=+744.571633069" lastFinishedPulling="2025-09-30 00:21:45.234801115 +0000 UTC m=+756.271050523" observedRunningTime="2025-09-30 00:21:45.627025191 +0000 UTC m=+756.663274599" watchObservedRunningTime="2025-09-30 00:21:45.627271017 +0000 UTC m=+756.663520425" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.673358 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" podStartSLOduration=2.11528218 podStartE2EDuration="13.673343142s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:33.763523958 +0000 UTC m=+744.799773366" lastFinishedPulling="2025-09-30 00:21:45.32158492 +0000 UTC m=+756.357834328" observedRunningTime="2025-09-30 00:21:45.66882372 +0000 UTC m=+756.705073138" watchObservedRunningTime="2025-09-30 00:21:45.673343142 +0000 UTC m=+756.709592550" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.674309 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-ingester-0" podStartSLOduration=2.883083044 podStartE2EDuration="13.674303667s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:34.391396192 +0000 UTC m=+745.427645600" lastFinishedPulling="2025-09-30 00:21:45.182616815 +0000 UTC m=+756.218866223" observedRunningTime="2025-09-30 00:21:45.652475253 +0000 UTC m=+756.688724681" watchObservedRunningTime="2025-09-30 00:21:45.674303667 +0000 UTC m=+756.710553075" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.684011 4809 scope.go:117] "RemoveContainer" containerID="0472183cf47259f6240250057f152f1e3c6b34d9b78ccd9bebcc4a80d5c2c38a" Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.687189 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5478479c55-g2sf9"] Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.689733 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-index-gateway-0" podStartSLOduration=2.767174137 podStartE2EDuration="13.689712671s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:34.31299603 +0000 UTC m=+745.349245428" lastFinishedPulling="2025-09-30 00:21:45.235534554 +0000 UTC m=+756.271783962" observedRunningTime="2025-09-30 00:21:45.684194723 +0000 UTC m=+756.720444131" watchObservedRunningTime="2025-09-30 00:21:45.689712671 +0000 UTC m=+756.725962079" Sep 30 00:21:45 crc kubenswrapper[4809]: W0930 00:21:45.696881 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0238339d_30a9_4978_b9de_890e163c27a5.slice/crio-532c8bdb27baef9a034923543ed6f1f206444187d025e179ee2b46ea3eb831cf WatchSource:0}: Error finding container 532c8bdb27baef9a034923543ed6f1f206444187d025e179ee2b46ea3eb831cf: Status 404 returned error can't find the container with id 532c8bdb27baef9a034923543ed6f1f206444187d025e179ee2b46ea3eb831cf Sep 30 00:21:45 crc 
kubenswrapper[4809]: I0930 00:21:45.699501 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq"] Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.701958 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pzxnq"] Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.719446 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgzpz"] Sep 30 00:21:45 crc kubenswrapper[4809]: I0930 00:21:45.724032 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgzpz"] Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.620970 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" event={"ID":"0238339d-30a9-4978-b9de-890e163c27a5","Type":"ContainerStarted","Data":"64b323b7c78bc7c1a3eac37d46ec2fa963d9ad151ac63d949a99a9d585ccd4a8"} Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.621737 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" event={"ID":"0238339d-30a9-4978-b9de-890e163c27a5","Type":"ContainerStarted","Data":"532c8bdb27baef9a034923543ed6f1f206444187d025e179ee2b46ea3eb831cf"} Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.624272 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.635343 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.640199 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" event={"ID":"70ea64fe-b6ed-460c-b37e-fb80fe0420af","Type":"ContainerStarted","Data":"dbfc4efb6cb8e3d6aa0d3de6e16f60ddf63b46f7b8f5db8b3923c1e9d87facb7"} Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.640356 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:21:46 crc kubenswrapper[4809]: I0930 00:21:46.694387 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5478479c55-g2sf9" podStartSLOduration=9.694359967 podStartE2EDuration="9.694359967s" podCreationTimestamp="2025-09-30 00:21:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:21:46.664108206 +0000 UTC m=+757.700357604" watchObservedRunningTime="2025-09-30 00:21:46.694359967 +0000 UTC m=+757.730609375" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.707471 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" path="/var/lib/kubelet/pods/5211fa59-5683-47eb-bfa9-100d466cd1d5/volumes" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.708458 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8339f60-b243-4c2e-b416-86ef26c104a0" path="/var/lib/kubelet/pods/d8339f60-b243-4c2e-b416-86ef26c104a0/volumes" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.729358 4809 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" podStartSLOduration=3.767400443 podStartE2EDuration="15.729324225s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:33.461236633 +0000 UTC m=+744.497486031" lastFinishedPulling="2025-09-30 00:21:45.423160405 +0000 UTC m=+756.459409813" observedRunningTime="2025-09-30 00:21:46.733473315 +0000 UTC m=+757.769722723" watchObservedRunningTime="2025-09-30 00:21:47.729324225 +0000 UTC m=+758.765573623" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.730475 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh"] Sep 30 00:21:47 crc kubenswrapper[4809]: E0930 00:21:47.730950 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerName="route-controller-manager" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.730973 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerName="route-controller-manager" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.731454 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5211fa59-5683-47eb-bfa9-100d466cd1d5" containerName="route-controller-manager" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.732216 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.734970 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.735212 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.735291 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.736238 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.736330 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.736723 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.743683 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh"] Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.893102 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzjkb\" (UniqueName: \"kubernetes.io/projected/6a10b697-4199-48bf-b2d3-db8e08e886e2-kube-api-access-mzjkb\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.893184 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a10b697-4199-48bf-b2d3-db8e08e886e2-config\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.893263 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a10b697-4199-48bf-b2d3-db8e08e886e2-serving-cert\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.893363 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a10b697-4199-48bf-b2d3-db8e08e886e2-client-ca\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.995615 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a10b697-4199-48bf-b2d3-db8e08e886e2-client-ca\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.995788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzjkb\" (UniqueName: \"kubernetes.io/projected/6a10b697-4199-48bf-b2d3-db8e08e886e2-kube-api-access-mzjkb\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.995859 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a10b697-4199-48bf-b2d3-db8e08e886e2-config\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.995954 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a10b697-4199-48bf-b2d3-db8e08e886e2-serving-cert\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.996829 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a10b697-4199-48bf-b2d3-db8e08e886e2-client-ca\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:47 crc kubenswrapper[4809]: I0930 00:21:47.998069 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/6a10b697-4199-48bf-b2d3-db8e08e886e2-config\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.006113 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a10b697-4199-48bf-b2d3-db8e08e886e2-serving-cert\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.022956 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzjkb\" (UniqueName: \"kubernetes.io/projected/6a10b697-4199-48bf-b2d3-db8e08e886e2-kube-api-access-mzjkb\") pod \"route-controller-manager-58f6fd5795-whvjh\" (UID: \"6a10b697-4199-48bf-b2d3-db8e08e886e2\") " pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.055081 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.427672 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh"] Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.652994 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" event={"ID":"6f1ca034-50e2-49d4-ba08-52e3d91d463c","Type":"ContainerStarted","Data":"93ef421297948f1f2b51abe1cad87242d5c870b7118b985d87ec8aa45ff8c573"} Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.653440 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.653476 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.655317 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" event={"ID":"6a10b697-4199-48bf-b2d3-db8e08e886e2","Type":"ContainerStarted","Data":"e0c98408d90369c036811c28d7027589f026b7cc4e8879f060cfe8b9657d22fa"} Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.655384 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" event={"ID":"6a10b697-4199-48bf-b2d3-db8e08e886e2","Type":"ContainerStarted","Data":"6fd64e472c4ff3d922f09770456ba079ab4893be523774812eec048a67ec13bc"} Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.655876 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.659005 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" event={"ID":"694ba803-44df-4e1d-9236-c84411352efe","Type":"ContainerStarted","Data":"4280b6848ef27030030ee63da8c26de9693fec3681c68545de951cf88892d082"} Sep 30 
00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.659080 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.659312 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.677171 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.677973 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.678202 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.680673 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.688700 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-575dfb8665-4fwzc" podStartSLOduration=3.011388804 podStartE2EDuration="16.688674956s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:33.909324767 +0000 UTC m=+744.945574175" lastFinishedPulling="2025-09-30 00:21:47.586610919 +0000 UTC m=+758.622860327" observedRunningTime="2025-09-30 00:21:48.686296653 +0000 UTC m=+759.722546071" watchObservedRunningTime="2025-09-30 00:21:48.688674956 +0000 UTC m=+759.724924364" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.769559 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" podStartSLOduration=3.290650982 podStartE2EDuration="16.769529904s" podCreationTimestamp="2025-09-30 00:21:32 +0000 UTC" firstStartedPulling="2025-09-30 00:21:34.099631499 +0000 UTC m=+745.135880907" lastFinishedPulling="2025-09-30 00:21:47.578510431 +0000 UTC m=+758.614759829" observedRunningTime="2025-09-30 00:21:48.768933388 +0000 UTC m=+759.805182796" watchObservedRunningTime="2025-09-30 00:21:48.769529904 +0000 UTC m=+759.805779312" Sep 30 00:21:48 crc kubenswrapper[4809]: I0930 00:21:48.770213 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" podStartSLOduration=11.770208633 podStartE2EDuration="11.770208633s" podCreationTimestamp="2025-09-30 00:21:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:21:48.738981215 +0000 UTC m=+759.775230623" watchObservedRunningTime="2025-09-30 00:21:48.770208633 +0000 UTC m=+759.806458031" Sep 30 00:21:49 crc kubenswrapper[4809]: I0930 00:21:49.020614 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-58f6fd5795-whvjh" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.389374 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c2mvd"] Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.392472 4809 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.412619 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c2mvd"] Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.464159 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-catalog-content\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.464289 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-utilities\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.464369 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxqzk\" (UniqueName: \"kubernetes.io/projected/88f84775-bf8e-4279-9ad9-4d8195b50d04-kube-api-access-mxqzk\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.565702 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxqzk\" (UniqueName: \"kubernetes.io/projected/88f84775-bf8e-4279-9ad9-4d8195b50d04-kube-api-access-mxqzk\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.565811 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-catalog-content\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.565852 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-utilities\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.567090 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-catalog-content\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.567117 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-utilities\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.592111 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxqzk\" (UniqueName: \"kubernetes.io/projected/88f84775-bf8e-4279-9ad9-4d8195b50d04-kube-api-access-mxqzk\") pod \"community-operators-c2mvd\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:52 crc kubenswrapper[4809]: I0930 00:21:52.712292 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.145899 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v5wxr"] Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.147173 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.157658 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v5wxr"] Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.242108 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c2mvd"] Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.316980 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwmww\" (UniqueName: \"kubernetes.io/projected/3d4090e1-9532-49ff-b8db-edfed098c298-kube-api-access-rwmww\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.317257 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-utilities\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.317284 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-catalog-content\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.419156 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwmww\" (UniqueName: \"kubernetes.io/projected/3d4090e1-9532-49ff-b8db-edfed098c298-kube-api-access-rwmww\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.419275 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-utilities\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.419310 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-catalog-content\") pod \"redhat-operators-v5wxr\" (UID: 
\"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.420151 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-catalog-content\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.420157 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-utilities\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.439139 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwmww\" (UniqueName: \"kubernetes.io/projected/3d4090e1-9532-49ff-b8db-edfed098c298-kube-api-access-rwmww\") pod \"redhat-operators-v5wxr\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.470873 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.694996 4809 generic.go:334] "Generic (PLEG): container finished" podID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerID="d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36" exitCode=0 Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.701985 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2mvd" event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerDied","Data":"d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36"} Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.702023 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2mvd" event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerStarted","Data":"66c2ae8e30dc16f48ceb1ea60146b105d526fedf2509f5ee62fcbfd090cc6225"} Sep 30 00:21:53 crc kubenswrapper[4809]: I0930 00:21:53.934509 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v5wxr"] Sep 30 00:21:54 crc kubenswrapper[4809]: I0930 00:21:54.705491 4809 generic.go:334] "Generic (PLEG): container finished" podID="3d4090e1-9532-49ff-b8db-edfed098c298" containerID="550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48" exitCode=0 Sep 30 00:21:54 crc kubenswrapper[4809]: I0930 00:21:54.705563 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v5wxr" event={"ID":"3d4090e1-9532-49ff-b8db-edfed098c298","Type":"ContainerDied","Data":"550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48"} Sep 30 00:21:54 crc kubenswrapper[4809]: I0930 00:21:54.705981 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v5wxr" event={"ID":"3d4090e1-9532-49ff-b8db-edfed098c298","Type":"ContainerStarted","Data":"86ed53417b6eecd49fc8db8f1cdd2ab77679c095d8b881c6e9e6c7f494d882e0"} Sep 30 00:21:54 crc kubenswrapper[4809]: I0930 00:21:54.708864 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-c2mvd" event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerStarted","Data":"049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5"} Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.325452 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.325561 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.325631 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.326736 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"165bd6d9351c1f3568d24afda65f12e5fa3c3ab08edb7e15f4eaa480ba979d2d"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.326876 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://165bd6d9351c1f3568d24afda65f12e5fa3c3ab08edb7e15f4eaa480ba979d2d" gracePeriod=600 Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.716687 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="165bd6d9351c1f3568d24afda65f12e5fa3c3ab08edb7e15f4eaa480ba979d2d" exitCode=0 Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.716688 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"165bd6d9351c1f3568d24afda65f12e5fa3c3ab08edb7e15f4eaa480ba979d2d"} Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.716813 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"1c9f0940c0710f8074f11d7eb6412ad3db197af8b05e861bb012eb4cb786e097"} Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.716844 4809 scope.go:117] "RemoveContainer" containerID="cdeffa51f659e763563b4d5a30aef21919c4d89a6729000221e77d2e14a6b1d2" Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.718408 4809 generic.go:334] "Generic (PLEG): container finished" podID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerID="049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5" exitCode=0 Sep 30 00:21:55 crc kubenswrapper[4809]: I0930 00:21:55.718438 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2mvd" 
event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerDied","Data":"049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5"} Sep 30 00:21:56 crc kubenswrapper[4809]: I0930 00:21:56.728531 4809 generic.go:334] "Generic (PLEG): container finished" podID="3d4090e1-9532-49ff-b8db-edfed098c298" containerID="417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89" exitCode=0 Sep 30 00:21:56 crc kubenswrapper[4809]: I0930 00:21:56.728943 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v5wxr" event={"ID":"3d4090e1-9532-49ff-b8db-edfed098c298","Type":"ContainerDied","Data":"417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89"} Sep 30 00:21:56 crc kubenswrapper[4809]: I0930 00:21:56.736007 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2mvd" event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerStarted","Data":"99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e"} Sep 30 00:21:56 crc kubenswrapper[4809]: I0930 00:21:56.797072 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c2mvd" podStartSLOduration=2.304377199 podStartE2EDuration="4.79703461s" podCreationTimestamp="2025-09-30 00:21:52 +0000 UTC" firstStartedPulling="2025-09-30 00:21:53.698750052 +0000 UTC m=+764.734999470" lastFinishedPulling="2025-09-30 00:21:56.191407473 +0000 UTC m=+767.227656881" observedRunningTime="2025-09-30 00:21:56.790463004 +0000 UTC m=+767.826712422" watchObservedRunningTime="2025-09-30 00:21:56.79703461 +0000 UTC m=+767.833284068" Sep 30 00:21:57 crc kubenswrapper[4809]: I0930 00:21:57.748901 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v5wxr" event={"ID":"3d4090e1-9532-49ff-b8db-edfed098c298","Type":"ContainerStarted","Data":"571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c"} Sep 30 00:21:57 crc kubenswrapper[4809]: I0930 00:21:57.771121 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v5wxr" podStartSLOduration=2.35244325 podStartE2EDuration="4.771104117s" podCreationTimestamp="2025-09-30 00:21:53 +0000 UTC" firstStartedPulling="2025-09-30 00:21:54.707326793 +0000 UTC m=+765.743576201" lastFinishedPulling="2025-09-30 00:21:57.12598766 +0000 UTC m=+768.162237068" observedRunningTime="2025-09-30 00:21:57.768946919 +0000 UTC m=+768.805196347" watchObservedRunningTime="2025-09-30 00:21:57.771104117 +0000 UTC m=+768.807353535" Sep 30 00:22:02 crc kubenswrapper[4809]: I0930 00:22:02.712864 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:22:02 crc kubenswrapper[4809]: I0930 00:22:02.713275 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:22:02 crc kubenswrapper[4809]: I0930 00:22:02.740010 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-distributor-67c9b4c785-5tqn7" Sep 30 00:22:02 crc kubenswrapper[4809]: I0930 00:22:02.774469 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:22:02 crc kubenswrapper[4809]: I0930 00:22:02.832338 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:22:02 crc kubenswrapper[4809]: I0930 00:22:02.952776 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-querier-7454676c57-jl2gl" Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.009765 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c2mvd"] Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.358615 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-query-frontend-6b467cdd84-59gpr" Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.471535 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.471585 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.526312 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.858948 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.990583 4809 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Sep 30 00:22:03 crc kubenswrapper[4809]: I0930 00:22:03.990670 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="f51c82f4-e2f6-4c00-8132-482f9d5b1e90" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 00:22:04 crc kubenswrapper[4809]: I0930 00:22:04.117459 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-index-gateway-0" Sep 30 00:22:04 crc kubenswrapper[4809]: I0930 00:22:04.383745 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-compactor-0" Sep 30 00:22:04 crc kubenswrapper[4809]: I0930 00:22:04.799830 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c2mvd" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="registry-server" containerID="cri-o://99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e" gracePeriod=2 Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.263742 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.417300 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v5wxr"] Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.441623 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-utilities\") pod \"88f84775-bf8e-4279-9ad9-4d8195b50d04\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.441862 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxqzk\" (UniqueName: \"kubernetes.io/projected/88f84775-bf8e-4279-9ad9-4d8195b50d04-kube-api-access-mxqzk\") pod \"88f84775-bf8e-4279-9ad9-4d8195b50d04\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.441979 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-catalog-content\") pod \"88f84775-bf8e-4279-9ad9-4d8195b50d04\" (UID: \"88f84775-bf8e-4279-9ad9-4d8195b50d04\") " Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.443386 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-utilities" (OuterVolumeSpecName: "utilities") pod "88f84775-bf8e-4279-9ad9-4d8195b50d04" (UID: "88f84775-bf8e-4279-9ad9-4d8195b50d04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.451693 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88f84775-bf8e-4279-9ad9-4d8195b50d04-kube-api-access-mxqzk" (OuterVolumeSpecName: "kube-api-access-mxqzk") pod "88f84775-bf8e-4279-9ad9-4d8195b50d04" (UID: "88f84775-bf8e-4279-9ad9-4d8195b50d04"). InnerVolumeSpecName "kube-api-access-mxqzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.544299 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.544355 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxqzk\" (UniqueName: \"kubernetes.io/projected/88f84775-bf8e-4279-9ad9-4d8195b50d04-kube-api-access-mxqzk\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.811592 4809 generic.go:334] "Generic (PLEG): container finished" podID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerID="99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e" exitCode=0 Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.811705 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2mvd" event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerDied","Data":"99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e"} Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.811764 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c2mvd" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.811796 4809 scope.go:117] "RemoveContainer" containerID="99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.811775 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2mvd" event={"ID":"88f84775-bf8e-4279-9ad9-4d8195b50d04","Type":"ContainerDied","Data":"66c2ae8e30dc16f48ceb1ea60146b105d526fedf2509f5ee62fcbfd090cc6225"} Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.811961 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v5wxr" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="registry-server" containerID="cri-o://571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c" gracePeriod=2 Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.846163 4809 scope.go:117] "RemoveContainer" containerID="049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.885301 4809 scope.go:117] "RemoveContainer" containerID="d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.902849 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88f84775-bf8e-4279-9ad9-4d8195b50d04" (UID: "88f84775-bf8e-4279-9ad9-4d8195b50d04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:05 crc kubenswrapper[4809]: I0930 00:22:05.949505 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88f84775-bf8e-4279-9ad9-4d8195b50d04-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.014557 4809 scope.go:117] "RemoveContainer" containerID="99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e" Sep 30 00:22:06 crc kubenswrapper[4809]: E0930 00:22:06.015159 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e\": container with ID starting with 99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e not found: ID does not exist" containerID="99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.015235 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e"} err="failed to get container status \"99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e\": rpc error: code = NotFound desc = could not find container \"99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e\": container with ID starting with 99d6777a4e1bcd2047e4d14c1d7f8c6176b8088211a2f6f2f03e6108562ab55e not found: ID does not exist" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.015279 4809 scope.go:117] "RemoveContainer" containerID="049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5" Sep 30 00:22:06 crc kubenswrapper[4809]: E0930 00:22:06.016036 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5\": container with ID starting with 049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5 not found: ID does not exist" containerID="049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.016097 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5"} err="failed to get container status \"049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5\": rpc error: code = NotFound desc = could not find container \"049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5\": container with ID starting with 049bf5ebbe4809e8d8ba79121af298e6b6c5598d4e5f56af226423da64c066c5 not found: ID does not exist" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.016141 4809 scope.go:117] "RemoveContainer" containerID="d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36" Sep 30 00:22:06 crc kubenswrapper[4809]: E0930 00:22:06.016590 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36\": container with ID starting with d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36 not found: ID does not exist" containerID="d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.016631 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36"} err="failed to get container status \"d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36\": rpc error: code = NotFound desc = could not find container \"d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36\": container with ID starting with d554fc632c87fca7cc1266e18f1b320ca1c91146629eec2af2aa604f68aa7d36 not found: ID does not exist" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.147773 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c2mvd"] Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.150791 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c2mvd"] Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.304055 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.457847 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-utilities\") pod \"3d4090e1-9532-49ff-b8db-edfed098c298\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.457982 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-catalog-content\") pod \"3d4090e1-9532-49ff-b8db-edfed098c298\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.458103 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwmww\" (UniqueName: \"kubernetes.io/projected/3d4090e1-9532-49ff-b8db-edfed098c298-kube-api-access-rwmww\") pod \"3d4090e1-9532-49ff-b8db-edfed098c298\" (UID: \"3d4090e1-9532-49ff-b8db-edfed098c298\") " Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.459491 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-utilities" (OuterVolumeSpecName: "utilities") pod "3d4090e1-9532-49ff-b8db-edfed098c298" (UID: "3d4090e1-9532-49ff-b8db-edfed098c298"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.463006 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d4090e1-9532-49ff-b8db-edfed098c298-kube-api-access-rwmww" (OuterVolumeSpecName: "kube-api-access-rwmww") pod "3d4090e1-9532-49ff-b8db-edfed098c298" (UID: "3d4090e1-9532-49ff-b8db-edfed098c298"). InnerVolumeSpecName "kube-api-access-rwmww". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.560375 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwmww\" (UniqueName: \"kubernetes.io/projected/3d4090e1-9532-49ff-b8db-edfed098c298-kube-api-access-rwmww\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.560413 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.819271 4809 generic.go:334] "Generic (PLEG): container finished" podID="3d4090e1-9532-49ff-b8db-edfed098c298" containerID="571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c" exitCode=0 Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.819330 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v5wxr" event={"ID":"3d4090e1-9532-49ff-b8db-edfed098c298","Type":"ContainerDied","Data":"571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c"} Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.819569 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v5wxr" event={"ID":"3d4090e1-9532-49ff-b8db-edfed098c298","Type":"ContainerDied","Data":"86ed53417b6eecd49fc8db8f1cdd2ab77679c095d8b881c6e9e6c7f494d882e0"} Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.819592 4809 scope.go:117] "RemoveContainer" containerID="571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.819941 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v5wxr" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.840097 4809 scope.go:117] "RemoveContainer" containerID="417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.856104 4809 scope.go:117] "RemoveContainer" containerID="550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.877858 4809 scope.go:117] "RemoveContainer" containerID="571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c" Sep 30 00:22:06 crc kubenswrapper[4809]: E0930 00:22:06.879033 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c\": container with ID starting with 571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c not found: ID does not exist" containerID="571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.879303 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c"} err="failed to get container status \"571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c\": rpc error: code = NotFound desc = could not find container \"571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c\": container with ID starting with 571511109bec6663d2c97a7db17fa7d5ed5b7be97b5a6a3248afe3c100e38e7c not found: ID does not exist" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.879437 4809 scope.go:117] "RemoveContainer" containerID="417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89" Sep 30 00:22:06 crc kubenswrapper[4809]: E0930 00:22:06.880286 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89\": container with ID starting with 417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89 not found: ID does not exist" containerID="417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.880355 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89"} err="failed to get container status \"417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89\": rpc error: code = NotFound desc = could not find container \"417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89\": container with ID starting with 417d190dcb858ae3a92dc702c0e69211b2926273f4f9698fbd51e9a04ec35b89 not found: ID does not exist" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.880387 4809 scope.go:117] "RemoveContainer" containerID="550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48" Sep 30 00:22:06 crc kubenswrapper[4809]: E0930 00:22:06.880734 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48\": container with ID starting with 550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48 not found: ID does not exist" containerID="550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48" 
Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.880755 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48"} err="failed to get container status \"550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48\": rpc error: code = NotFound desc = could not find container \"550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48\": container with ID starting with 550160a98e246740ce626f23cac80c72d6d14c18b13e900de06d38c47d9f4b48 not found: ID does not exist" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.887342 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d4090e1-9532-49ff-b8db-edfed098c298" (UID: "3d4090e1-9532-49ff-b8db-edfed098c298"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:06 crc kubenswrapper[4809]: I0930 00:22:06.966028 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d4090e1-9532-49ff-b8db-edfed098c298-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:07 crc kubenswrapper[4809]: I0930 00:22:07.151504 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v5wxr"] Sep 30 00:22:07 crc kubenswrapper[4809]: I0930 00:22:07.156850 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v5wxr"] Sep 30 00:22:07 crc kubenswrapper[4809]: I0930 00:22:07.705929 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" path="/var/lib/kubelet/pods/3d4090e1-9532-49ff-b8db-edfed098c298/volumes" Sep 30 00:22:07 crc kubenswrapper[4809]: I0930 00:22:07.707477 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" path="/var/lib/kubelet/pods/88f84775-bf8e-4279-9ad9-4d8195b50d04/volumes" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.921392 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-42dtq"] Sep 30 00:22:10 crc kubenswrapper[4809]: E0930 00:22:10.922277 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="registry-server" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922293 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="registry-server" Sep 30 00:22:10 crc kubenswrapper[4809]: E0930 00:22:10.922309 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="extract-utilities" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922317 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="extract-utilities" Sep 30 00:22:10 crc kubenswrapper[4809]: E0930 00:22:10.922329 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="extract-utilities" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922339 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="extract-utilities" Sep 30 00:22:10 crc 
kubenswrapper[4809]: E0930 00:22:10.922365 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="extract-content" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922376 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="extract-content" Sep 30 00:22:10 crc kubenswrapper[4809]: E0930 00:22:10.922385 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="registry-server" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922392 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="registry-server" Sep 30 00:22:10 crc kubenswrapper[4809]: E0930 00:22:10.922402 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="extract-content" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922408 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="extract-content" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922582 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="88f84775-bf8e-4279-9ad9-4d8195b50d04" containerName="registry-server" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.922594 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d4090e1-9532-49ff-b8db-edfed098c298" containerName="registry-server" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.923855 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:10 crc kubenswrapper[4809]: I0930 00:22:10.925169 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-42dtq"] Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.025810 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-utilities\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.025878 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-catalog-content\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.025910 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6cgs\" (UniqueName: \"kubernetes.io/projected/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-kube-api-access-k6cgs\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.126691 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-utilities\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " 
pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.126737 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-catalog-content\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.126758 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6cgs\" (UniqueName: \"kubernetes.io/projected/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-kube-api-access-k6cgs\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.127486 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-utilities\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.127782 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-catalog-content\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.145511 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6cgs\" (UniqueName: \"kubernetes.io/projected/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-kube-api-access-k6cgs\") pod \"redhat-marketplace-42dtq\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.252833 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.681448 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-42dtq"] Sep 30 00:22:11 crc kubenswrapper[4809]: I0930 00:22:11.861190 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerStarted","Data":"b006a5baed5140b5619e014e77b7589b4963cb46ed6b36eadddec34572282f97"} Sep 30 00:22:12 crc kubenswrapper[4809]: I0930 00:22:12.872596 4809 generic.go:334] "Generic (PLEG): container finished" podID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerID="507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8" exitCode=0 Sep 30 00:22:12 crc kubenswrapper[4809]: I0930 00:22:12.872716 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerDied","Data":"507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8"} Sep 30 00:22:13 crc kubenswrapper[4809]: I0930 00:22:13.883207 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerStarted","Data":"a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e"} Sep 30 00:22:13 crc kubenswrapper[4809]: I0930 00:22:13.986383 4809 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Sep 30 00:22:13 crc kubenswrapper[4809]: I0930 00:22:13.986440 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="f51c82f4-e2f6-4c00-8132-482f9d5b1e90" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 00:22:14 crc kubenswrapper[4809]: I0930 00:22:14.894965 4809 generic.go:334] "Generic (PLEG): container finished" podID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerID="a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e" exitCode=0 Sep 30 00:22:14 crc kubenswrapper[4809]: I0930 00:22:14.895034 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerDied","Data":"a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e"} Sep 30 00:22:15 crc kubenswrapper[4809]: I0930 00:22:15.902731 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerStarted","Data":"3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248"} Sep 30 00:22:15 crc kubenswrapper[4809]: I0930 00:22:15.923475 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-42dtq" podStartSLOduration=3.297834346 podStartE2EDuration="5.923459696s" podCreationTimestamp="2025-09-30 00:22:10 +0000 UTC" firstStartedPulling="2025-09-30 00:22:12.874416801 +0000 UTC m=+783.910666229" lastFinishedPulling="2025-09-30 00:22:15.500042161 +0000 UTC m=+786.536291579" observedRunningTime="2025-09-30 00:22:15.922492667 +0000 UTC m=+786.958742075" 
watchObservedRunningTime="2025-09-30 00:22:15.923459696 +0000 UTC m=+786.959709104" Sep 30 00:22:21 crc kubenswrapper[4809]: I0930 00:22:21.253843 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:21 crc kubenswrapper[4809]: I0930 00:22:21.254273 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:21 crc kubenswrapper[4809]: I0930 00:22:21.295082 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:22 crc kubenswrapper[4809]: I0930 00:22:22.020280 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:22 crc kubenswrapper[4809]: I0930 00:22:22.091625 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-42dtq"] Sep 30 00:22:23 crc kubenswrapper[4809]: I0930 00:22:23.969707 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-42dtq" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="registry-server" containerID="cri-o://3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248" gracePeriod=2 Sep 30 00:22:23 crc kubenswrapper[4809]: I0930 00:22:23.991279 4809 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Sep 30 00:22:23 crc kubenswrapper[4809]: I0930 00:22:23.991361 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="f51c82f4-e2f6-4c00-8132-482f9d5b1e90" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.426338 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.527448 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-catalog-content\") pod \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.527591 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-utilities\") pod \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.527684 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6cgs\" (UniqueName: \"kubernetes.io/projected/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-kube-api-access-k6cgs\") pod \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\" (UID: \"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a\") " Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.528973 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-utilities" (OuterVolumeSpecName: "utilities") pod "eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" (UID: "eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.533162 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-kube-api-access-k6cgs" (OuterVolumeSpecName: "kube-api-access-k6cgs") pod "eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" (UID: "eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a"). InnerVolumeSpecName "kube-api-access-k6cgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.539392 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" (UID: "eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.629951 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.630079 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.630097 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6cgs\" (UniqueName: \"kubernetes.io/projected/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a-kube-api-access-k6cgs\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.983836 4809 generic.go:334] "Generic (PLEG): container finished" podID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerID="3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248" exitCode=0 Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.983894 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerDied","Data":"3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248"} Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.983934 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-42dtq" event={"ID":"eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a","Type":"ContainerDied","Data":"b006a5baed5140b5619e014e77b7589b4963cb46ed6b36eadddec34572282f97"} Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.983956 4809 scope.go:117] "RemoveContainer" containerID="3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248" Sep 30 00:22:24 crc kubenswrapper[4809]: I0930 00:22:24.983972 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-42dtq" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.014781 4809 scope.go:117] "RemoveContainer" containerID="a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.037202 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-42dtq"] Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.043199 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-42dtq"] Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.062936 4809 scope.go:117] "RemoveContainer" containerID="507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.082428 4809 scope.go:117] "RemoveContainer" containerID="3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248" Sep 30 00:22:25 crc kubenswrapper[4809]: E0930 00:22:25.082937 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248\": container with ID starting with 3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248 not found: ID does not exist" containerID="3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.082974 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248"} err="failed to get container status \"3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248\": rpc error: code = NotFound desc = could not find container \"3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248\": container with ID starting with 3feed0b19d4519db035793d5db95c0833045510176204d36d20deac2aa916248 not found: ID does not exist" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.083000 4809 scope.go:117] "RemoveContainer" containerID="a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e" Sep 30 00:22:25 crc kubenswrapper[4809]: E0930 00:22:25.083505 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e\": container with ID starting with a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e not found: ID does not exist" containerID="a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.083535 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e"} err="failed to get container status \"a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e\": rpc error: code = NotFound desc = could not find container \"a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e\": container with ID starting with a473c91add28942b96f9a739502cbbcdbc870d30339649ce97c1fa7f4bb4801e not found: ID does not exist" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.083553 4809 scope.go:117] "RemoveContainer" containerID="507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8" Sep 30 00:22:25 crc kubenswrapper[4809]: E0930 00:22:25.084491 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8\": container with ID starting with 507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8 not found: ID does not exist" containerID="507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.084580 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8"} err="failed to get container status \"507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8\": rpc error: code = NotFound desc = could not find container \"507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8\": container with ID starting with 507e5cb577a257eda8e5f5c0a58d8a522697a272aaddfa30396cc809bb5864f8 not found: ID does not exist" Sep 30 00:22:25 crc kubenswrapper[4809]: I0930 00:22:25.708055 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" path="/var/lib/kubelet/pods/eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a/volumes" Sep 30 00:22:33 crc kubenswrapper[4809]: I0930 00:22:33.989751 4809 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Sep 30 00:22:33 crc kubenswrapper[4809]: I0930 00:22:33.990265 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="f51c82f4-e2f6-4c00-8132-482f9d5b1e90" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.491448 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-82qww"] Sep 30 00:22:39 crc kubenswrapper[4809]: E0930 00:22:39.492056 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="extract-utilities" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.492080 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="extract-utilities" Sep 30 00:22:39 crc kubenswrapper[4809]: E0930 00:22:39.492114 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="extract-content" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.492127 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="extract-content" Sep 30 00:22:39 crc kubenswrapper[4809]: E0930 00:22:39.492150 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="registry-server" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.492163 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="registry-server" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.492351 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="eff5e9e0-5cf3-4e76-bc77-9e05ffa73d2a" containerName="registry-server" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.493741 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.506603 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-82qww"] Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.558140 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-catalog-content\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.558225 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-utilities\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.558509 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk8dx\" (UniqueName: \"kubernetes.io/projected/4eaf061f-9969-4b73-a918-b315528a2b6e-kube-api-access-rk8dx\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.659665 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk8dx\" (UniqueName: \"kubernetes.io/projected/4eaf061f-9969-4b73-a918-b315528a2b6e-kube-api-access-rk8dx\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.659857 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-catalog-content\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.659901 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-utilities\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.660440 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-catalog-content\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.660533 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-utilities\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.677979 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rk8dx\" (UniqueName: \"kubernetes.io/projected/4eaf061f-9969-4b73-a918-b315528a2b6e-kube-api-access-rk8dx\") pod \"certified-operators-82qww\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:39 crc kubenswrapper[4809]: I0930 00:22:39.865158 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:40 crc kubenswrapper[4809]: I0930 00:22:40.092435 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-82qww"] Sep 30 00:22:41 crc kubenswrapper[4809]: I0930 00:22:41.117473 4809 generic.go:334] "Generic (PLEG): container finished" podID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerID="714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4" exitCode=0 Sep 30 00:22:41 crc kubenswrapper[4809]: I0930 00:22:41.117767 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82qww" event={"ID":"4eaf061f-9969-4b73-a918-b315528a2b6e","Type":"ContainerDied","Data":"714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4"} Sep 30 00:22:41 crc kubenswrapper[4809]: I0930 00:22:41.117793 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82qww" event={"ID":"4eaf061f-9969-4b73-a918-b315528a2b6e","Type":"ContainerStarted","Data":"07dbdcc27f5c551b252ed35cf25718cf5e6d78f1d8d8015ad48b2d466b7fe12f"} Sep 30 00:22:43 crc kubenswrapper[4809]: I0930 00:22:43.129591 4809 generic.go:334] "Generic (PLEG): container finished" podID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerID="d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba" exitCode=0 Sep 30 00:22:43 crc kubenswrapper[4809]: I0930 00:22:43.129762 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82qww" event={"ID":"4eaf061f-9969-4b73-a918-b315528a2b6e","Type":"ContainerDied","Data":"d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba"} Sep 30 00:22:43 crc kubenswrapper[4809]: I0930 00:22:43.986447 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-ingester-0" Sep 30 00:22:44 crc kubenswrapper[4809]: I0930 00:22:44.137120 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82qww" event={"ID":"4eaf061f-9969-4b73-a918-b315528a2b6e","Type":"ContainerStarted","Data":"2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a"} Sep 30 00:22:44 crc kubenswrapper[4809]: I0930 00:22:44.157769 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-82qww" podStartSLOduration=2.738605433 podStartE2EDuration="5.157746294s" podCreationTimestamp="2025-09-30 00:22:39 +0000 UTC" firstStartedPulling="2025-09-30 00:22:41.119880112 +0000 UTC m=+812.156129560" lastFinishedPulling="2025-09-30 00:22:43.539020973 +0000 UTC m=+814.575270421" observedRunningTime="2025-09-30 00:22:44.152873867 +0000 UTC m=+815.189123275" watchObservedRunningTime="2025-09-30 00:22:44.157746294 +0000 UTC m=+815.193995752" Sep 30 00:22:49 crc kubenswrapper[4809]: I0930 00:22:49.865536 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:49 crc kubenswrapper[4809]: I0930 00:22:49.865719 4809 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:49 crc kubenswrapper[4809]: I0930 00:22:49.913225 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:50 crc kubenswrapper[4809]: I0930 00:22:50.228876 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:50 crc kubenswrapper[4809]: I0930 00:22:50.269117 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-82qww"] Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.195998 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-82qww" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="registry-server" containerID="cri-o://2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a" gracePeriod=2 Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.584913 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.756331 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-catalog-content\") pod \"4eaf061f-9969-4b73-a918-b315528a2b6e\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.756399 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-utilities\") pod \"4eaf061f-9969-4b73-a918-b315528a2b6e\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.756522 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rk8dx\" (UniqueName: \"kubernetes.io/projected/4eaf061f-9969-4b73-a918-b315528a2b6e-kube-api-access-rk8dx\") pod \"4eaf061f-9969-4b73-a918-b315528a2b6e\" (UID: \"4eaf061f-9969-4b73-a918-b315528a2b6e\") " Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.757984 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-utilities" (OuterVolumeSpecName: "utilities") pod "4eaf061f-9969-4b73-a918-b315528a2b6e" (UID: "4eaf061f-9969-4b73-a918-b315528a2b6e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.768915 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eaf061f-9969-4b73-a918-b315528a2b6e-kube-api-access-rk8dx" (OuterVolumeSpecName: "kube-api-access-rk8dx") pod "4eaf061f-9969-4b73-a918-b315528a2b6e" (UID: "4eaf061f-9969-4b73-a918-b315528a2b6e"). InnerVolumeSpecName "kube-api-access-rk8dx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.859328 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.859371 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rk8dx\" (UniqueName: \"kubernetes.io/projected/4eaf061f-9969-4b73-a918-b315528a2b6e-kube-api-access-rk8dx\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:52 crc kubenswrapper[4809]: I0930 00:22:52.989927 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4eaf061f-9969-4b73-a918-b315528a2b6e" (UID: "4eaf061f-9969-4b73-a918-b315528a2b6e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.062422 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaf061f-9969-4b73-a918-b315528a2b6e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.204792 4809 generic.go:334] "Generic (PLEG): container finished" podID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerID="2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a" exitCode=0 Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.204854 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82qww" event={"ID":"4eaf061f-9969-4b73-a918-b315528a2b6e","Type":"ContainerDied","Data":"2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a"} Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.204904 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82qww" event={"ID":"4eaf061f-9969-4b73-a918-b315528a2b6e","Type":"ContainerDied","Data":"07dbdcc27f5c551b252ed35cf25718cf5e6d78f1d8d8015ad48b2d466b7fe12f"} Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.204909 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-82qww" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.204934 4809 scope.go:117] "RemoveContainer" containerID="2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.224951 4809 scope.go:117] "RemoveContainer" containerID="d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.247742 4809 scope.go:117] "RemoveContainer" containerID="714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.297660 4809 scope.go:117] "RemoveContainer" containerID="2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a" Sep 30 00:22:53 crc kubenswrapper[4809]: E0930 00:22:53.298443 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a\": container with ID starting with 2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a not found: ID does not exist" containerID="2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.298502 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a"} err="failed to get container status \"2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a\": rpc error: code = NotFound desc = could not find container \"2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a\": container with ID starting with 2516c0c6cf7759b5759d6ced627c088e061c2ce8099101448ef3ad2b93c2a55a not found: ID does not exist" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.298536 4809 scope.go:117] "RemoveContainer" containerID="d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.298689 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-82qww"] Sep 30 00:22:53 crc kubenswrapper[4809]: E0930 00:22:53.299032 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba\": container with ID starting with d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba not found: ID does not exist" containerID="d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.299068 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba"} err="failed to get container status \"d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba\": rpc error: code = NotFound desc = could not find container \"d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba\": container with ID starting with d9b038f593c41e42704606f804546d97e21954cbc004383461e8749de431fcba not found: ID does not exist" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.299093 4809 scope.go:117] "RemoveContainer" containerID="714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4" Sep 30 00:22:53 crc kubenswrapper[4809]: E0930 00:22:53.299482 4809 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4\": container with ID starting with 714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4 not found: ID does not exist" containerID="714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.299521 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4"} err="failed to get container status \"714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4\": rpc error: code = NotFound desc = could not find container \"714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4\": container with ID starting with 714c754b4fc20063382aa83f331efa5afc87ce3e65cdd606547db5691c3ae9d4 not found: ID does not exist" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.304794 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-82qww"] Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.533129 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-lf4d2"] Sep 30 00:22:53 crc kubenswrapper[4809]: E0930 00:22:53.533449 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="extract-content" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.533465 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="extract-content" Sep 30 00:22:53 crc kubenswrapper[4809]: E0930 00:22:53.533488 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="registry-server" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.533499 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="registry-server" Sep 30 00:22:53 crc kubenswrapper[4809]: E0930 00:22:53.533524 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="extract-utilities" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.533534 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="extract-utilities" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.533734 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" containerName="registry-server" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.534472 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.541437 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.542330 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.542505 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.544224 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-r5rf2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.552094 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.553743 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.563719 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-lf4d2"] Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.671609 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-metrics\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.671948 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-collector-token\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.671977 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-collector-syslog-receiver\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.671996 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-config\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672011 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-sa-token\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672028 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-trusted-ca\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " 
pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672048 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-tmp\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672063 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-config-openshift-service-cacrt\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672242 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-entrypoint\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672283 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98vtl\" (UniqueName: \"kubernetes.io/projected/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-kube-api-access-98vtl\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.672510 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-datadir\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.700019 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eaf061f-9969-4b73-a918-b315528a2b6e" path="/var/lib/kubelet/pods/4eaf061f-9969-4b73-a918-b315528a2b6e/volumes" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774381 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-collector-syslog-receiver\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774424 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-config\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774444 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-sa-token\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774459 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-trusted-ca\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774482 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-config-openshift-service-cacrt\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774501 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-tmp\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774542 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-entrypoint\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774562 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98vtl\" (UniqueName: \"kubernetes.io/projected/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-kube-api-access-98vtl\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774608 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-datadir\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774628 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-metrics\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.774699 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-collector-token\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.775301 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-datadir\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.775331 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-config\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.775404 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-entrypoint\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.775798 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-config-openshift-service-cacrt\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.776291 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-trusted-ca\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.778478 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-collector-syslog-receiver\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.778822 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-tmp\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.779244 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-metrics\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.781172 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-collector-token\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.796846 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98vtl\" (UniqueName: \"kubernetes.io/projected/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-kube-api-access-98vtl\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.798536 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/7567296a-f03a-4732-8c0c-ab0a5f6a2acb-sa-token\") pod \"collector-lf4d2\" (UID: \"7567296a-f03a-4732-8c0c-ab0a5f6a2acb\") " pod="openshift-logging/collector-lf4d2" Sep 30 00:22:53 crc kubenswrapper[4809]: I0930 00:22:53.871292 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-lf4d2" Sep 30 00:22:54 crc kubenswrapper[4809]: I0930 00:22:54.262700 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-lf4d2"] Sep 30 00:22:55 crc kubenswrapper[4809]: I0930 00:22:55.218976 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-lf4d2" event={"ID":"7567296a-f03a-4732-8c0c-ab0a5f6a2acb","Type":"ContainerStarted","Data":"d7703071e28bcef69a511806f8a287c46a62bda28675179530f82b864871b18a"} Sep 30 00:23:00 crc kubenswrapper[4809]: I0930 00:23:00.280594 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-lf4d2" event={"ID":"7567296a-f03a-4732-8c0c-ab0a5f6a2acb","Type":"ContainerStarted","Data":"eecc6bd9f4a25fd8b0edc5653999e965a9d8297aa80a23639994bc62dec8addd"} Sep 30 00:23:00 crc kubenswrapper[4809]: I0930 00:23:00.305354 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-lf4d2" podStartSLOduration=1.7099283010000002 podStartE2EDuration="7.305331825s" podCreationTimestamp="2025-09-30 00:22:53 +0000 UTC" firstStartedPulling="2025-09-30 00:22:54.274586167 +0000 UTC m=+825.310835575" lastFinishedPulling="2025-09-30 00:22:59.869989681 +0000 UTC m=+830.906239099" observedRunningTime="2025-09-30 00:23:00.304546754 +0000 UTC m=+831.340796182" watchObservedRunningTime="2025-09-30 00:23:00.305331825 +0000 UTC m=+831.341581253" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.597073 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m"] Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.599131 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.606443 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m"] Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.606575 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.670614 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.670715 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.670750 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g8dq\" (UniqueName: \"kubernetes.io/projected/d64a9837-eab6-471a-a17e-cff4f9ed5a06-kube-api-access-4g8dq\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.772335 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.772435 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.772497 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g8dq\" (UniqueName: \"kubernetes.io/projected/d64a9837-eab6-471a-a17e-cff4f9ed5a06-kube-api-access-4g8dq\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.773116 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.773212 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.792720 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g8dq\" (UniqueName: \"kubernetes.io/projected/d64a9837-eab6-471a-a17e-cff4f9ed5a06-kube-api-access-4g8dq\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:11 crc kubenswrapper[4809]: I0930 00:23:11.920180 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:12 crc kubenswrapper[4809]: I0930 00:23:12.403440 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m"] Sep 30 00:23:13 crc kubenswrapper[4809]: I0930 00:23:13.380019 4809 generic.go:334] "Generic (PLEG): container finished" podID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerID="66274fb7fc43723cd1a2e40b455dc84787e3ccb86cc31b140c0500065d3c65eb" exitCode=0 Sep 30 00:23:13 crc kubenswrapper[4809]: I0930 00:23:13.380098 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" event={"ID":"d64a9837-eab6-471a-a17e-cff4f9ed5a06","Type":"ContainerDied","Data":"66274fb7fc43723cd1a2e40b455dc84787e3ccb86cc31b140c0500065d3c65eb"} Sep 30 00:23:13 crc kubenswrapper[4809]: I0930 00:23:13.380267 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" event={"ID":"d64a9837-eab6-471a-a17e-cff4f9ed5a06","Type":"ContainerStarted","Data":"819e9419ab6813fba0081fe0ac5542fd07e60fe8511cf304130c24cb1c40e2e6"} Sep 30 00:23:15 crc kubenswrapper[4809]: I0930 00:23:15.395893 4809 generic.go:334] "Generic (PLEG): container finished" podID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerID="b31a30be7fcaf67ee5073eb59fcd780b76a9ae78410a5a048307263bd8f1c70f" exitCode=0 Sep 30 00:23:15 crc kubenswrapper[4809]: I0930 00:23:15.395997 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" event={"ID":"d64a9837-eab6-471a-a17e-cff4f9ed5a06","Type":"ContainerDied","Data":"b31a30be7fcaf67ee5073eb59fcd780b76a9ae78410a5a048307263bd8f1c70f"} Sep 30 00:23:16 crc kubenswrapper[4809]: I0930 00:23:16.407533 4809 generic.go:334] "Generic (PLEG): container finished" podID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerID="a902604b32a652d857a49970778cba1f0d41ecefd20b63c6bd457240f9558220" exitCode=0 Sep 30 00:23:16 crc kubenswrapper[4809]: I0930 
00:23:16.407609 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" event={"ID":"d64a9837-eab6-471a-a17e-cff4f9ed5a06","Type":"ContainerDied","Data":"a902604b32a652d857a49970778cba1f0d41ecefd20b63c6bd457240f9558220"} Sep 30 00:23:17 crc kubenswrapper[4809]: I0930 00:23:17.840578 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:17 crc kubenswrapper[4809]: I0930 00:23:17.982382 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g8dq\" (UniqueName: \"kubernetes.io/projected/d64a9837-eab6-471a-a17e-cff4f9ed5a06-kube-api-access-4g8dq\") pod \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " Sep 30 00:23:17 crc kubenswrapper[4809]: I0930 00:23:17.982532 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-bundle\") pod \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " Sep 30 00:23:17 crc kubenswrapper[4809]: I0930 00:23:17.982579 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-util\") pod \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\" (UID: \"d64a9837-eab6-471a-a17e-cff4f9ed5a06\") " Sep 30 00:23:17 crc kubenswrapper[4809]: I0930 00:23:17.983414 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-bundle" (OuterVolumeSpecName: "bundle") pod "d64a9837-eab6-471a-a17e-cff4f9ed5a06" (UID: "d64a9837-eab6-471a-a17e-cff4f9ed5a06"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:23:17 crc kubenswrapper[4809]: I0930 00:23:17.989616 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d64a9837-eab6-471a-a17e-cff4f9ed5a06-kube-api-access-4g8dq" (OuterVolumeSpecName: "kube-api-access-4g8dq") pod "d64a9837-eab6-471a-a17e-cff4f9ed5a06" (UID: "d64a9837-eab6-471a-a17e-cff4f9ed5a06"). InnerVolumeSpecName "kube-api-access-4g8dq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.084320 4809 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.084361 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g8dq\" (UniqueName: \"kubernetes.io/projected/d64a9837-eab6-471a-a17e-cff4f9ed5a06-kube-api-access-4g8dq\") on node \"crc\" DevicePath \"\"" Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.176723 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-util" (OuterVolumeSpecName: "util") pod "d64a9837-eab6-471a-a17e-cff4f9ed5a06" (UID: "d64a9837-eab6-471a-a17e-cff4f9ed5a06"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.185711 4809 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d64a9837-eab6-471a-a17e-cff4f9ed5a06-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.426378 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" event={"ID":"d64a9837-eab6-471a-a17e-cff4f9ed5a06","Type":"ContainerDied","Data":"819e9419ab6813fba0081fe0ac5542fd07e60fe8511cf304130c24cb1c40e2e6"} Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.426424 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="819e9419ab6813fba0081fe0ac5542fd07e60fe8511cf304130c24cb1c40e2e6" Sep 30 00:23:18 crc kubenswrapper[4809]: I0930 00:23:18.426483 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.350626 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k"] Sep 30 00:23:21 crc kubenswrapper[4809]: E0930 00:23:21.351403 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="extract" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.351421 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="extract" Sep 30 00:23:21 crc kubenswrapper[4809]: E0930 00:23:21.351440 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="util" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.351450 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="util" Sep 30 00:23:21 crc kubenswrapper[4809]: E0930 00:23:21.351475 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="pull" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.351486 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="pull" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.351691 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d64a9837-eab6-471a-a17e-cff4f9ed5a06" containerName="extract" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.352396 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.355087 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.357120 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-wplv6" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.368621 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k"] Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.372004 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.427401 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvhkx\" (UniqueName: \"kubernetes.io/projected/5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf-kube-api-access-zvhkx\") pod \"nmstate-operator-5d6f6cfd66-d824k\" (UID: \"5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.528544 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvhkx\" (UniqueName: \"kubernetes.io/projected/5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf-kube-api-access-zvhkx\") pod \"nmstate-operator-5d6f6cfd66-d824k\" (UID: \"5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.569483 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvhkx\" (UniqueName: \"kubernetes.io/projected/5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf-kube-api-access-zvhkx\") pod \"nmstate-operator-5d6f6cfd66-d824k\" (UID: \"5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" Sep 30 00:23:21 crc kubenswrapper[4809]: I0930 00:23:21.671016 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" Sep 30 00:23:22 crc kubenswrapper[4809]: I0930 00:23:22.177620 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k"] Sep 30 00:23:22 crc kubenswrapper[4809]: W0930 00:23:22.189164 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f50e79d_ecc2_4e0b_9d07_9e1746b5f0cf.slice/crio-4183aab60a244a2f4c5aaa88b1e82f7e3a4c592be5e50b206f0db986fb994fbe WatchSource:0}: Error finding container 4183aab60a244a2f4c5aaa88b1e82f7e3a4c592be5e50b206f0db986fb994fbe: Status 404 returned error can't find the container with id 4183aab60a244a2f4c5aaa88b1e82f7e3a4c592be5e50b206f0db986fb994fbe Sep 30 00:23:22 crc kubenswrapper[4809]: I0930 00:23:22.453054 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" event={"ID":"5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf","Type":"ContainerStarted","Data":"4183aab60a244a2f4c5aaa88b1e82f7e3a4c592be5e50b206f0db986fb994fbe"} Sep 30 00:23:25 crc kubenswrapper[4809]: I0930 00:23:25.474900 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" event={"ID":"5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf","Type":"ContainerStarted","Data":"ca9b1cab5c5c7da3b5c8fb375c94f45549b67d371c2090385d8b989e1aad7224"} Sep 30 00:23:25 crc kubenswrapper[4809]: I0930 00:23:25.496579 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-d824k" podStartSLOduration=1.587972493 podStartE2EDuration="4.49655529s" podCreationTimestamp="2025-09-30 00:23:21 +0000 UTC" firstStartedPulling="2025-09-30 00:23:22.191003626 +0000 UTC m=+853.227253024" lastFinishedPulling="2025-09-30 00:23:25.099586373 +0000 UTC m=+856.135835821" observedRunningTime="2025-09-30 00:23:25.490770125 +0000 UTC m=+856.527019603" watchObservedRunningTime="2025-09-30 00:23:25.49655529 +0000 UTC m=+856.532804708" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.527447 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-t4sfn"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.528534 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.531895 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-n9877" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.535467 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.536512 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.542869 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.543670 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.545323 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.546142 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.549998 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599529 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-595ts\" (UniqueName: \"kubernetes.io/projected/53550ed2-303b-4654-bc54-7b6f8e992f89-kube-api-access-595ts\") pod \"nmstate-metrics-58fcddf996-qm4mw\" (UID: \"53550ed2-303b-4654-bc54-7b6f8e992f89\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599571 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-dbus-socket\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599601 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-nmstate-lock\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599624 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnlnx\" (UniqueName: \"kubernetes.io/projected/7be99730-5c11-4b2b-b063-3500766ddfe6-kube-api-access-wnlnx\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599663 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4f6a4111-9185-4e06-84ae-105096b1fc26-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-cx9mr\" (UID: \"4f6a4111-9185-4e06-84ae-105096b1fc26\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599682 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-ovs-socket\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.599717 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m8zm\" (UniqueName: \"kubernetes.io/projected/4f6a4111-9185-4e06-84ae-105096b1fc26-kube-api-access-8m8zm\") pod \"nmstate-webhook-6d689559c5-cx9mr\" (UID: \"4f6a4111-9185-4e06-84ae-105096b1fc26\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: 
I0930 00:23:26.655081 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.656053 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.659261 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.659335 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-t4vcp" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.662781 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.664377 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.700946 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m8zm\" (UniqueName: \"kubernetes.io/projected/4f6a4111-9185-4e06-84ae-105096b1fc26-kube-api-access-8m8zm\") pod \"nmstate-webhook-6d689559c5-cx9mr\" (UID: \"4f6a4111-9185-4e06-84ae-105096b1fc26\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701059 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-595ts\" (UniqueName: \"kubernetes.io/projected/53550ed2-303b-4654-bc54-7b6f8e992f89-kube-api-access-595ts\") pod \"nmstate-metrics-58fcddf996-qm4mw\" (UID: \"53550ed2-303b-4654-bc54-7b6f8e992f89\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701099 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-dbus-socket\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701134 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-nmstate-lock\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701161 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnlnx\" (UniqueName: \"kubernetes.io/projected/7be99730-5c11-4b2b-b063-3500766ddfe6-kube-api-access-wnlnx\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701191 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4f6a4111-9185-4e06-84ae-105096b1fc26-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-cx9mr\" (UID: \"4f6a4111-9185-4e06-84ae-105096b1fc26\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701215 4809 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-ovs-socket\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701317 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-ovs-socket\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701477 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-nmstate-lock\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.701529 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7be99730-5c11-4b2b-b063-3500766ddfe6-dbus-socket\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.718021 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4f6a4111-9185-4e06-84ae-105096b1fc26-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-cx9mr\" (UID: \"4f6a4111-9185-4e06-84ae-105096b1fc26\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.718117 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m8zm\" (UniqueName: \"kubernetes.io/projected/4f6a4111-9185-4e06-84ae-105096b1fc26-kube-api-access-8m8zm\") pod \"nmstate-webhook-6d689559c5-cx9mr\" (UID: \"4f6a4111-9185-4e06-84ae-105096b1fc26\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.722160 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnlnx\" (UniqueName: \"kubernetes.io/projected/7be99730-5c11-4b2b-b063-3500766ddfe6-kube-api-access-wnlnx\") pod \"nmstate-handler-t4sfn\" (UID: \"7be99730-5c11-4b2b-b063-3500766ddfe6\") " pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.725488 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-595ts\" (UniqueName: \"kubernetes.io/projected/53550ed2-303b-4654-bc54-7b6f8e992f89-kube-api-access-595ts\") pod \"nmstate-metrics-58fcddf996-qm4mw\" (UID: \"53550ed2-303b-4654-bc54-7b6f8e992f89\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.803183 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4dxv\" (UniqueName: \"kubernetes.io/projected/a0c7c770-d362-416b-9ccf-2e1ce42a1096-kube-api-access-c4dxv\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.803267 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a0c7c770-d362-416b-9ccf-2e1ce42a1096-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.803300 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a0c7c770-d362-416b-9ccf-2e1ce42a1096-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.844591 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.845756 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-78fb9bb869-z7t6w"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.846567 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.854783 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.865610 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.875480 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-78fb9bb869-z7t6w"] Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.907460 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-oauth-serving-cert\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.907750 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbpm4\" (UniqueName: \"kubernetes.io/projected/fbd086e6-14a9-4b1d-8586-b937b64dfa28-kube-api-access-rbpm4\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.907787 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-trusted-ca-bundle\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.907805 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-service-ca\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.907922 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4dxv\" (UniqueName: \"kubernetes.io/projected/a0c7c770-d362-416b-9ccf-2e1ce42a1096-kube-api-access-c4dxv\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.907972 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-config\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.908033 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a0c7c770-d362-416b-9ccf-2e1ce42a1096-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.908084 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-oauth-config\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.908114 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a0c7c770-d362-416b-9ccf-2e1ce42a1096-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.908137 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-serving-cert\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:26 crc kubenswrapper[4809]: E0930 00:23:26.908782 4809 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 30 00:23:26 crc kubenswrapper[4809]: E0930 00:23:26.908826 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a0c7c770-d362-416b-9ccf-2e1ce42a1096-plugin-serving-cert podName:a0c7c770-d362-416b-9ccf-2e1ce42a1096 nodeName:}" failed. No retries permitted until 2025-09-30 00:23:27.408809216 +0000 UTC m=+858.445058624 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/a0c7c770-d362-416b-9ccf-2e1ce42a1096-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-g6zsl" (UID: "a0c7c770-d362-416b-9ccf-2e1ce42a1096") : secret "plugin-serving-cert" not found Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.908955 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a0c7c770-d362-416b-9ccf-2e1ce42a1096-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:26 crc kubenswrapper[4809]: I0930 00:23:26.930422 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4dxv\" (UniqueName: \"kubernetes.io/projected/a0c7c770-d362-416b-9ccf-2e1ce42a1096-kube-api-access-c4dxv\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.011890 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-service-ca\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.011979 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-config\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.012065 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-oauth-config\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.012107 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-serving-cert\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.012127 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-oauth-serving-cert\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.012150 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbpm4\" (UniqueName: \"kubernetes.io/projected/fbd086e6-14a9-4b1d-8586-b937b64dfa28-kube-api-access-rbpm4\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.012179 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-trusted-ca-bundle\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.013502 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-oauth-serving-cert\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.013967 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-service-ca\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.016809 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-trusted-ca-bundle\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.020602 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-serving-cert\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.021147 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-config\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.030205 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-oauth-config\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.036251 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbpm4\" (UniqueName: \"kubernetes.io/projected/fbd086e6-14a9-4b1d-8586-b937b64dfa28-kube-api-access-rbpm4\") pod \"console-78fb9bb869-z7t6w\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.165974 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.320141 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw"] Sep 30 00:23:27 crc kubenswrapper[4809]: W0930 00:23:27.323542 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53550ed2_303b_4654_bc54_7b6f8e992f89.slice/crio-aa742614f87713fa28c5aecf9482505430f832cfb32d8f6ef7f6f2fe895a46ab WatchSource:0}: Error finding container aa742614f87713fa28c5aecf9482505430f832cfb32d8f6ef7f6f2fe895a46ab: Status 404 returned error can't find the container with id aa742614f87713fa28c5aecf9482505430f832cfb32d8f6ef7f6f2fe895a46ab Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.373781 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr"] Sep 30 00:23:27 crc kubenswrapper[4809]: W0930 00:23:27.378298 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f6a4111_9185_4e06_84ae_105096b1fc26.slice/crio-1f79b3a6b81ac1c7d1b3b5f63615b03a32b30b85b1f6c97a44164d9977a89af6 WatchSource:0}: Error finding container 1f79b3a6b81ac1c7d1b3b5f63615b03a32b30b85b1f6c97a44164d9977a89af6: Status 404 returned error can't find the container with id 1f79b3a6b81ac1c7d1b3b5f63615b03a32b30b85b1f6c97a44164d9977a89af6 Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.418232 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a0c7c770-d362-416b-9ccf-2e1ce42a1096-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.423484 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a0c7c770-d362-416b-9ccf-2e1ce42a1096-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-g6zsl\" (UID: \"a0c7c770-d362-416b-9ccf-2e1ce42a1096\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.486070 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" event={"ID":"4f6a4111-9185-4e06-84ae-105096b1fc26","Type":"ContainerStarted","Data":"1f79b3a6b81ac1c7d1b3b5f63615b03a32b30b85b1f6c97a44164d9977a89af6"} Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.486957 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-t4sfn" event={"ID":"7be99730-5c11-4b2b-b063-3500766ddfe6","Type":"ContainerStarted","Data":"0cddb4786bb7bf964c3ac7931ca4e5b0ffca6488d79f92ed4b22b76d522fa3b7"} Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.487892 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" event={"ID":"53550ed2-303b-4654-bc54-7b6f8e992f89","Type":"ContainerStarted","Data":"aa742614f87713fa28c5aecf9482505430f832cfb32d8f6ef7f6f2fe895a46ab"} Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.574801 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" Sep 30 00:23:27 crc kubenswrapper[4809]: I0930 00:23:27.613214 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-78fb9bb869-z7t6w"] Sep 30 00:23:27 crc kubenswrapper[4809]: W0930 00:23:27.623274 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbd086e6_14a9_4b1d_8586_b937b64dfa28.slice/crio-f5e9c3f07726cd7c0bebb46f9ddd906887acfbc16fdbdbf678173bab2d3d1d0e WatchSource:0}: Error finding container f5e9c3f07726cd7c0bebb46f9ddd906887acfbc16fdbdbf678173bab2d3d1d0e: Status 404 returned error can't find the container with id f5e9c3f07726cd7c0bebb46f9ddd906887acfbc16fdbdbf678173bab2d3d1d0e Sep 30 00:23:28 crc kubenswrapper[4809]: I0930 00:23:28.007340 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl"] Sep 30 00:23:28 crc kubenswrapper[4809]: W0930 00:23:28.022880 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0c7c770_d362_416b_9ccf_2e1ce42a1096.slice/crio-d9deab244afe86728dc3e1a77f59b113244d3e70b285daaf38253b94206b5f39 WatchSource:0}: Error finding container d9deab244afe86728dc3e1a77f59b113244d3e70b285daaf38253b94206b5f39: Status 404 returned error can't find the container with id d9deab244afe86728dc3e1a77f59b113244d3e70b285daaf38253b94206b5f39 Sep 30 00:23:28 crc kubenswrapper[4809]: I0930 00:23:28.496739 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78fb9bb869-z7t6w" event={"ID":"fbd086e6-14a9-4b1d-8586-b937b64dfa28","Type":"ContainerStarted","Data":"0df70012ab4141c837123ccd7608c66b97e97cae637eec858e6cb5f79e74f00f"} Sep 30 00:23:28 crc kubenswrapper[4809]: I0930 00:23:28.497321 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78fb9bb869-z7t6w" event={"ID":"fbd086e6-14a9-4b1d-8586-b937b64dfa28","Type":"ContainerStarted","Data":"f5e9c3f07726cd7c0bebb46f9ddd906887acfbc16fdbdbf678173bab2d3d1d0e"} Sep 30 00:23:28 crc kubenswrapper[4809]: I0930 00:23:28.497978 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" event={"ID":"a0c7c770-d362-416b-9ccf-2e1ce42a1096","Type":"ContainerStarted","Data":"d9deab244afe86728dc3e1a77f59b113244d3e70b285daaf38253b94206b5f39"} Sep 30 00:23:28 crc kubenswrapper[4809]: I0930 00:23:28.518919 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-78fb9bb869-z7t6w" podStartSLOduration=2.5188941529999997 podStartE2EDuration="2.518894153s" podCreationTimestamp="2025-09-30 00:23:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:23:28.515868692 +0000 UTC m=+859.552118090" watchObservedRunningTime="2025-09-30 00:23:28.518894153 +0000 UTC m=+859.555143561" Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.514870 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" event={"ID":"4f6a4111-9185-4e06-84ae-105096b1fc26","Type":"ContainerStarted","Data":"45bf47d9d183adda592dacfa81743a2ad065d1c52492fe841cc87bb7d4c4a114"} Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.515682 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.517569 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-t4sfn" event={"ID":"7be99730-5c11-4b2b-b063-3500766ddfe6","Type":"ContainerStarted","Data":"9b66e382eedb339669a2056f64f985aa90985bfa86fc5ed08a6bb0afe8ef95ca"} Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.517705 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.524176 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" event={"ID":"53550ed2-303b-4654-bc54-7b6f8e992f89","Type":"ContainerStarted","Data":"690a8f0e44aa4454ea00742a89a51225352891e75b630b9fe254ff8c50d3d155"} Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.535414 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" podStartSLOduration=2.32169437 podStartE2EDuration="4.535395128s" podCreationTimestamp="2025-09-30 00:23:26 +0000 UTC" firstStartedPulling="2025-09-30 00:23:27.380704102 +0000 UTC m=+858.416953520" lastFinishedPulling="2025-09-30 00:23:29.59440487 +0000 UTC m=+860.630654278" observedRunningTime="2025-09-30 00:23:30.532625664 +0000 UTC m=+861.568875092" watchObservedRunningTime="2025-09-30 00:23:30.535395128 +0000 UTC m=+861.571644526" Sep 30 00:23:30 crc kubenswrapper[4809]: I0930 00:23:30.560361 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-t4sfn" podStartSLOduration=1.916197975 podStartE2EDuration="4.560345404s" podCreationTimestamp="2025-09-30 00:23:26 +0000 UTC" firstStartedPulling="2025-09-30 00:23:26.909149565 +0000 UTC m=+857.945398973" lastFinishedPulling="2025-09-30 00:23:29.553296994 +0000 UTC m=+860.589546402" observedRunningTime="2025-09-30 00:23:30.557866078 +0000 UTC m=+861.594115486" watchObservedRunningTime="2025-09-30 00:23:30.560345404 +0000 UTC m=+861.596594812" Sep 30 00:23:31 crc kubenswrapper[4809]: I0930 00:23:31.531088 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" event={"ID":"a0c7c770-d362-416b-9ccf-2e1ce42a1096","Type":"ContainerStarted","Data":"89000657f98d68baef51d3c447804dd8705a5d39b903942b4e4300a6f95e0ea8"} Sep 30 00:23:31 crc kubenswrapper[4809]: I0930 00:23:31.552350 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-g6zsl" podStartSLOduration=2.7649062989999997 podStartE2EDuration="5.552330012s" podCreationTimestamp="2025-09-30 00:23:26 +0000 UTC" firstStartedPulling="2025-09-30 00:23:28.029083089 +0000 UTC m=+859.065332497" lastFinishedPulling="2025-09-30 00:23:30.816506802 +0000 UTC m=+861.852756210" observedRunningTime="2025-09-30 00:23:31.551901751 +0000 UTC m=+862.588151199" watchObservedRunningTime="2025-09-30 00:23:31.552330012 +0000 UTC m=+862.588579420" Sep 30 00:23:34 crc kubenswrapper[4809]: I0930 00:23:34.561608 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" event={"ID":"53550ed2-303b-4654-bc54-7b6f8e992f89","Type":"ContainerStarted","Data":"1e5e4ee2ae43b9c00b696cdc0a4341359a141f9102a9edac88f2052f3319758b"} Sep 30 00:23:34 crc kubenswrapper[4809]: I0930 00:23:34.582692 4809 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-qm4mw" podStartSLOduration=2.071255695 podStartE2EDuration="8.58266784s" podCreationTimestamp="2025-09-30 00:23:26 +0000 UTC" firstStartedPulling="2025-09-30 00:23:27.326873665 +0000 UTC m=+858.363123073" lastFinishedPulling="2025-09-30 00:23:33.83828581 +0000 UTC m=+864.874535218" observedRunningTime="2025-09-30 00:23:34.57853284 +0000 UTC m=+865.614782248" watchObservedRunningTime="2025-09-30 00:23:34.58266784 +0000 UTC m=+865.618917248" Sep 30 00:23:36 crc kubenswrapper[4809]: I0930 00:23:36.887631 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-t4sfn" Sep 30 00:23:37 crc kubenswrapper[4809]: I0930 00:23:37.166304 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:37 crc kubenswrapper[4809]: I0930 00:23:37.166353 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:37 crc kubenswrapper[4809]: I0930 00:23:37.170882 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:37 crc kubenswrapper[4809]: I0930 00:23:37.587102 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:23:37 crc kubenswrapper[4809]: I0930 00:23:37.672132 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-gjq9t"] Sep 30 00:23:46 crc kubenswrapper[4809]: I0930 00:23:46.875103 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cx9mr" Sep 30 00:23:55 crc kubenswrapper[4809]: I0930 00:23:55.324605 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:23:55 crc kubenswrapper[4809]: I0930 00:23:55.325152 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:24:02 crc kubenswrapper[4809]: I0930 00:24:02.715713 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-gjq9t" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerName="console" containerID="cri-o://5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec" gracePeriod=15 Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.192198 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-gjq9t_513c8a3a-0885-48a4-ad5d-7d1dab1fcb05/console/0.log" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.192564 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.264363 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-trusted-ca-bundle\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.264783 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-oauth-serving-cert\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.264834 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-service-ca\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.264982 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-serving-cert\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.265020 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-config\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.265047 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hsgs\" (UniqueName: \"kubernetes.io/projected/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-kube-api-access-7hsgs\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.265110 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-oauth-config\") pod \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\" (UID: \"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05\") " Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.265519 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.266342 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-config" (OuterVolumeSpecName: "console-config") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.266393 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.266632 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-service-ca" (OuterVolumeSpecName: "service-ca") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.271893 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-kube-api-access-7hsgs" (OuterVolumeSpecName: "kube-api-access-7hsgs") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "kube-api-access-7hsgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.272351 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.274002 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" (UID: "513c8a3a-0885-48a4-ad5d-7d1dab1fcb05"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366720 4809 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366760 4809 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366772 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hsgs\" (UniqueName: \"kubernetes.io/projected/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-kube-api-access-7hsgs\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366793 4809 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366805 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366816 4809 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.366826 4809 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.811400 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-gjq9t_513c8a3a-0885-48a4-ad5d-7d1dab1fcb05/console/0.log" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.811447 4809 generic.go:334] "Generic (PLEG): container finished" podID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerID="5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec" exitCode=2 Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.811476 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gjq9t" event={"ID":"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05","Type":"ContainerDied","Data":"5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec"} Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.811504 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gjq9t" event={"ID":"513c8a3a-0885-48a4-ad5d-7d1dab1fcb05","Type":"ContainerDied","Data":"1ac4a505560e98a81743d982570aa46eb561e7d7bc7037d47161c6f4611f81cd"} Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.811521 4809 scope.go:117] "RemoveContainer" containerID="5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.811540 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-gjq9t" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.831865 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-gjq9t"] Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.835862 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-gjq9t"] Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.837219 4809 scope.go:117] "RemoveContainer" containerID="5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec" Sep 30 00:24:03 crc kubenswrapper[4809]: E0930 00:24:03.837826 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec\": container with ID starting with 5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec not found: ID does not exist" containerID="5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec" Sep 30 00:24:03 crc kubenswrapper[4809]: I0930 00:24:03.837853 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec"} err="failed to get container status \"5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec\": rpc error: code = NotFound desc = could not find container \"5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec\": container with ID starting with 5e7906f1d2b769184e067fffbefdd7be56b7289c19d8d2a1b2875cee81bd14ec not found: ID does not exist" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.082195 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5"] Sep 30 00:24:04 crc kubenswrapper[4809]: E0930 00:24:04.082793 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerName="console" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.082814 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerName="console" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.082951 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" containerName="console" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.084142 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.085990 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.089505 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5"] Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.182028 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.182077 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.182116 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srr2j\" (UniqueName: \"kubernetes.io/projected/afa86b55-6324-47b3-a861-8075fb31f4e1-kube-api-access-srr2j\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.283391 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.283434 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.284037 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.284077 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srr2j\" (UniqueName: 
\"kubernetes.io/projected/afa86b55-6324-47b3-a861-8075fb31f4e1-kube-api-access-srr2j\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.284112 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.310761 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srr2j\" (UniqueName: \"kubernetes.io/projected/afa86b55-6324-47b3-a861-8075fb31f4e1-kube-api-access-srr2j\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.415043 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:04 crc kubenswrapper[4809]: I0930 00:24:04.834263 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5"] Sep 30 00:24:05 crc kubenswrapper[4809]: I0930 00:24:05.706761 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="513c8a3a-0885-48a4-ad5d-7d1dab1fcb05" path="/var/lib/kubelet/pods/513c8a3a-0885-48a4-ad5d-7d1dab1fcb05/volumes" Sep 30 00:24:05 crc kubenswrapper[4809]: I0930 00:24:05.827947 4809 generic.go:334] "Generic (PLEG): container finished" podID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerID="eae6c0722cf1db281a692ed6926b95c56ee01847dfe4cd952522fecdb4155241" exitCode=0 Sep 30 00:24:05 crc kubenswrapper[4809]: I0930 00:24:05.828012 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" event={"ID":"afa86b55-6324-47b3-a861-8075fb31f4e1","Type":"ContainerDied","Data":"eae6c0722cf1db281a692ed6926b95c56ee01847dfe4cd952522fecdb4155241"} Sep 30 00:24:05 crc kubenswrapper[4809]: I0930 00:24:05.828080 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" event={"ID":"afa86b55-6324-47b3-a861-8075fb31f4e1","Type":"ContainerStarted","Data":"e3d9428c1a262140d0374d9723e0d51359dee07529f46fbd63c46060423b5ac1"} Sep 30 00:24:07 crc kubenswrapper[4809]: I0930 00:24:07.843302 4809 generic.go:334] "Generic (PLEG): container finished" podID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerID="712fc3bd9ebf808f2e11802d31adeebea7be160b0c4fb9c0ce376b584455e9f6" exitCode=0 Sep 30 00:24:07 crc kubenswrapper[4809]: I0930 00:24:07.843351 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" event={"ID":"afa86b55-6324-47b3-a861-8075fb31f4e1","Type":"ContainerDied","Data":"712fc3bd9ebf808f2e11802d31adeebea7be160b0c4fb9c0ce376b584455e9f6"} Sep 30 00:24:08 crc kubenswrapper[4809]: 
I0930 00:24:08.854456 4809 generic.go:334] "Generic (PLEG): container finished" podID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerID="bbe41dfcb6e45cb0494ec753f96e0d83f0e1d0714ed84f4b38dcd2f9a71e1c6a" exitCode=0 Sep 30 00:24:08 crc kubenswrapper[4809]: I0930 00:24:08.854533 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" event={"ID":"afa86b55-6324-47b3-a861-8075fb31f4e1","Type":"ContainerDied","Data":"bbe41dfcb6e45cb0494ec753f96e0d83f0e1d0714ed84f4b38dcd2f9a71e1c6a"} Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.156291 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.177962 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-bundle\") pod \"afa86b55-6324-47b3-a861-8075fb31f4e1\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.178168 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-util\") pod \"afa86b55-6324-47b3-a861-8075fb31f4e1\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.178309 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srr2j\" (UniqueName: \"kubernetes.io/projected/afa86b55-6324-47b3-a861-8075fb31f4e1-kube-api-access-srr2j\") pod \"afa86b55-6324-47b3-a861-8075fb31f4e1\" (UID: \"afa86b55-6324-47b3-a861-8075fb31f4e1\") " Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.179510 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-bundle" (OuterVolumeSpecName: "bundle") pod "afa86b55-6324-47b3-a861-8075fb31f4e1" (UID: "afa86b55-6324-47b3-a861-8075fb31f4e1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.185133 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afa86b55-6324-47b3-a861-8075fb31f4e1-kube-api-access-srr2j" (OuterVolumeSpecName: "kube-api-access-srr2j") pod "afa86b55-6324-47b3-a861-8075fb31f4e1" (UID: "afa86b55-6324-47b3-a861-8075fb31f4e1"). InnerVolumeSpecName "kube-api-access-srr2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.196466 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-util" (OuterVolumeSpecName: "util") pod "afa86b55-6324-47b3-a861-8075fb31f4e1" (UID: "afa86b55-6324-47b3-a861-8075fb31f4e1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.286366 4809 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.286403 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srr2j\" (UniqueName: \"kubernetes.io/projected/afa86b55-6324-47b3-a861-8075fb31f4e1-kube-api-access-srr2j\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.286418 4809 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afa86b55-6324-47b3-a861-8075fb31f4e1-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.876640 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" event={"ID":"afa86b55-6324-47b3-a861-8075fb31f4e1","Type":"ContainerDied","Data":"e3d9428c1a262140d0374d9723e0d51359dee07529f46fbd63c46060423b5ac1"} Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.876714 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3d9428c1a262140d0374d9723e0d51359dee07529f46fbd63c46060423b5ac1" Sep 30 00:24:10 crc kubenswrapper[4809]: I0930 00:24:10.876986 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.496408 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-664f94654f-smv98"] Sep 30 00:24:19 crc kubenswrapper[4809]: E0930 00:24:19.497765 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="util" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.497784 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="util" Sep 30 00:24:19 crc kubenswrapper[4809]: E0930 00:24:19.497829 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="extract" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.497837 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="extract" Sep 30 00:24:19 crc kubenswrapper[4809]: E0930 00:24:19.497855 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="pull" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.497864 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="pull" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.498142 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="afa86b55-6324-47b3-a861-8075fb31f4e1" containerName="extract" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.499755 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.503788 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.506617 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.506966 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.507266 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-lc746" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.507449 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.541453 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-664f94654f-smv98"] Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.635785 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q95r\" (UniqueName: \"kubernetes.io/projected/4c3e6257-d644-4970-9cb2-08af086cbe4b-kube-api-access-5q95r\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.635867 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4c3e6257-d644-4970-9cb2-08af086cbe4b-apiservice-cert\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.635925 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4c3e6257-d644-4970-9cb2-08af086cbe4b-webhook-cert\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.716867 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb"] Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.717840 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.721117 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.721225 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.725320 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-5ld62" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.736729 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4c3e6257-d644-4970-9cb2-08af086cbe4b-webhook-cert\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.736801 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q95r\" (UniqueName: \"kubernetes.io/projected/4c3e6257-d644-4970-9cb2-08af086cbe4b-kube-api-access-5q95r\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.736876 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4c3e6257-d644-4970-9cb2-08af086cbe4b-apiservice-cert\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.743282 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4c3e6257-d644-4970-9cb2-08af086cbe4b-apiservice-cert\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.744365 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4c3e6257-d644-4970-9cb2-08af086cbe4b-webhook-cert\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.760569 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q95r\" (UniqueName: \"kubernetes.io/projected/4c3e6257-d644-4970-9cb2-08af086cbe4b-kube-api-access-5q95r\") pod \"metallb-operator-controller-manager-664f94654f-smv98\" (UID: \"4c3e6257-d644-4970-9cb2-08af086cbe4b\") " pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.786293 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb"] Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.838921 4809 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrpng\" (UniqueName: \"kubernetes.io/projected/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-kube-api-access-wrpng\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.839034 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-webhook-cert\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.839086 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-apiservice-cert\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.855262 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.940747 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrpng\" (UniqueName: \"kubernetes.io/projected/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-kube-api-access-wrpng\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.940816 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-webhook-cert\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.940853 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-apiservice-cert\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.945590 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-apiservice-cert\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.950621 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-webhook-cert\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: 
\"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:19 crc kubenswrapper[4809]: I0930 00:24:19.981465 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrpng\" (UniqueName: \"kubernetes.io/projected/2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb-kube-api-access-wrpng\") pod \"metallb-operator-webhook-server-656594ff57-qhpbb\" (UID: \"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb\") " pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:20 crc kubenswrapper[4809]: I0930 00:24:20.032247 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:20 crc kubenswrapper[4809]: I0930 00:24:20.171214 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-664f94654f-smv98"] Sep 30 00:24:20 crc kubenswrapper[4809]: I0930 00:24:20.186085 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:24:20 crc kubenswrapper[4809]: I0930 00:24:20.553489 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb"] Sep 30 00:24:20 crc kubenswrapper[4809]: W0930 00:24:20.560055 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b0bb1ea_c64d_4c16_8bb9_10c2a477adcb.slice/crio-9faec95ee4edbf8329803ea51ded364675f29ea93026df02b4624c393e96c46c WatchSource:0}: Error finding container 9faec95ee4edbf8329803ea51ded364675f29ea93026df02b4624c393e96c46c: Status 404 returned error can't find the container with id 9faec95ee4edbf8329803ea51ded364675f29ea93026df02b4624c393e96c46c Sep 30 00:24:20 crc kubenswrapper[4809]: I0930 00:24:20.938807 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" event={"ID":"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb","Type":"ContainerStarted","Data":"9faec95ee4edbf8329803ea51ded364675f29ea93026df02b4624c393e96c46c"} Sep 30 00:24:20 crc kubenswrapper[4809]: I0930 00:24:20.939818 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" event={"ID":"4c3e6257-d644-4970-9cb2-08af086cbe4b","Type":"ContainerStarted","Data":"c38d9047f0c3d2a6527ea6e6c5547f2dae427caaa6fb410efce0304d68db2185"} Sep 30 00:24:25 crc kubenswrapper[4809]: I0930 00:24:25.324917 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:24:25 crc kubenswrapper[4809]: I0930 00:24:25.325474 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:24:25 crc kubenswrapper[4809]: I0930 00:24:25.995627 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" 
event={"ID":"2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb","Type":"ContainerStarted","Data":"cdc05ab5b9e28450cafedca4c1814d0a30ceb67425289af3e90024e5f8493077"} Sep 30 00:24:25 crc kubenswrapper[4809]: I0930 00:24:25.995880 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:25 crc kubenswrapper[4809]: I0930 00:24:25.997467 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" event={"ID":"4c3e6257-d644-4970-9cb2-08af086cbe4b","Type":"ContainerStarted","Data":"234d308b49c1293471dc20c162ebe4938b981619d1bfab5a8edc48633946086a"} Sep 30 00:24:25 crc kubenswrapper[4809]: I0930 00:24:25.997811 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:24:26 crc kubenswrapper[4809]: I0930 00:24:26.016543 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" podStartSLOduration=2.40234238 podStartE2EDuration="7.016525434s" podCreationTimestamp="2025-09-30 00:24:19 +0000 UTC" firstStartedPulling="2025-09-30 00:24:20.562802961 +0000 UTC m=+911.599052369" lastFinishedPulling="2025-09-30 00:24:25.176986015 +0000 UTC m=+916.213235423" observedRunningTime="2025-09-30 00:24:26.011711416 +0000 UTC m=+917.047960824" watchObservedRunningTime="2025-09-30 00:24:26.016525434 +0000 UTC m=+917.052774832" Sep 30 00:24:26 crc kubenswrapper[4809]: I0930 00:24:26.038312 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" podStartSLOduration=2.07752601 podStartE2EDuration="7.038281825s" podCreationTimestamp="2025-09-30 00:24:19 +0000 UTC" firstStartedPulling="2025-09-30 00:24:20.185593933 +0000 UTC m=+911.221843351" lastFinishedPulling="2025-09-30 00:24:25.146349758 +0000 UTC m=+916.182599166" observedRunningTime="2025-09-30 00:24:26.030576579 +0000 UTC m=+917.066826057" watchObservedRunningTime="2025-09-30 00:24:26.038281825 +0000 UTC m=+917.074531263" Sep 30 00:24:40 crc kubenswrapper[4809]: I0930 00:24:40.037902 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-656594ff57-qhpbb" Sep 30 00:24:55 crc kubenswrapper[4809]: I0930 00:24:55.325347 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:24:55 crc kubenswrapper[4809]: I0930 00:24:55.326013 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:24:55 crc kubenswrapper[4809]: I0930 00:24:55.326085 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:24:55 crc kubenswrapper[4809]: I0930 00:24:55.326986 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"1c9f0940c0710f8074f11d7eb6412ad3db197af8b05e861bb012eb4cb786e097"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:24:55 crc kubenswrapper[4809]: I0930 00:24:55.327078 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://1c9f0940c0710f8074f11d7eb6412ad3db197af8b05e861bb012eb4cb786e097" gracePeriod=600 Sep 30 00:24:56 crc kubenswrapper[4809]: I0930 00:24:56.215690 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="1c9f0940c0710f8074f11d7eb6412ad3db197af8b05e861bb012eb4cb786e097" exitCode=0 Sep 30 00:24:56 crc kubenswrapper[4809]: I0930 00:24:56.215752 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"1c9f0940c0710f8074f11d7eb6412ad3db197af8b05e861bb012eb4cb786e097"} Sep 30 00:24:56 crc kubenswrapper[4809]: I0930 00:24:56.216098 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"980f1cfcbca928da130d5259c71b79d3b5762bb3b4baa2fd8b3f457e575da03a"} Sep 30 00:24:56 crc kubenswrapper[4809]: I0930 00:24:56.216120 4809 scope.go:117] "RemoveContainer" containerID="165bd6d9351c1f3568d24afda65f12e5fa3c3ab08edb7e15f4eaa480ba979d2d" Sep 30 00:24:59 crc kubenswrapper[4809]: I0930 00:24:59.859271 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-664f94654f-smv98" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.673399 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn"] Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.674692 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.676944 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.677290 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-97cqq" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.688576 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4wd69"] Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.691762 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.693722 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.693939 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.696525 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn"] Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.750996 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-p7464"] Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.752287 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.756560 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.757178 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.757283 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-whh6x" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.757540 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.775421 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-8pcmd"] Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.776775 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.779421 4809 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783323 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be322c70-01b7-420f-b3bb-3f77a839930f-metrics-certs\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783385 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-reloader\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783483 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzwvl\" (UniqueName: \"kubernetes.io/projected/be322c70-01b7-420f-b3bb-3f77a839930f-kube-api-access-xzwvl\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783540 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-frr-sockets\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783581 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5267fc88-99fb-48cc-b1eb-3cd3963ce8d5-cert\") pod \"frr-k8s-webhook-server-5478bdb765-n7lvn\" (UID: \"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783607 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-metrics\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783630 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/be322c70-01b7-420f-b3bb-3f77a839930f-frr-startup\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783692 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-frr-conf\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.783756 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkk8j\" (UniqueName: 
\"kubernetes.io/projected/5267fc88-99fb-48cc-b1eb-3cd3963ce8d5-kube-api-access-fkk8j\") pod \"frr-k8s-webhook-server-5478bdb765-n7lvn\" (UID: \"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.789344 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-8pcmd"] Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885257 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrzv4\" (UniqueName: \"kubernetes.io/projected/394e51b3-8289-44ef-80b6-f14b9b56b5bd-kube-api-access-jrzv4\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885317 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885354 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzwvl\" (UniqueName: \"kubernetes.io/projected/be322c70-01b7-420f-b3bb-3f77a839930f-kube-api-access-xzwvl\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885389 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-frr-sockets\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885416 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5267fc88-99fb-48cc-b1eb-3cd3963ce8d5-cert\") pod \"frr-k8s-webhook-server-5478bdb765-n7lvn\" (UID: \"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885440 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-metrics\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885470 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/be322c70-01b7-420f-b3bb-3f77a839930f-frr-startup\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885495 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ef59525d-d301-4af1-b9c8-26933453b3d9-metrics-certs\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885529 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s6tv\" (UniqueName: \"kubernetes.io/projected/ef59525d-d301-4af1-b9c8-26933453b3d9-kube-api-access-2s6tv\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885551 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/394e51b3-8289-44ef-80b6-f14b9b56b5bd-metallb-excludel2\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885575 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-frr-conf\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885613 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-metrics-certs\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885657 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkk8j\" (UniqueName: \"kubernetes.io/projected/5267fc88-99fb-48cc-b1eb-3cd3963ce8d5-kube-api-access-fkk8j\") pod \"frr-k8s-webhook-server-5478bdb765-n7lvn\" (UID: \"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885684 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be322c70-01b7-420f-b3bb-3f77a839930f-metrics-certs\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885719 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-reloader\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.885753 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ef59525d-d301-4af1-b9c8-26933453b3d9-cert\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.886365 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-frr-sockets\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.886435 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-metrics\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.886618 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-frr-conf\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.886691 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/be322c70-01b7-420f-b3bb-3f77a839930f-reloader\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.887070 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/be322c70-01b7-420f-b3bb-3f77a839930f-frr-startup\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.892274 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be322c70-01b7-420f-b3bb-3f77a839930f-metrics-certs\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.901632 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzwvl\" (UniqueName: \"kubernetes.io/projected/be322c70-01b7-420f-b3bb-3f77a839930f-kube-api-access-xzwvl\") pod \"frr-k8s-4wd69\" (UID: \"be322c70-01b7-420f-b3bb-3f77a839930f\") " pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.901745 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5267fc88-99fb-48cc-b1eb-3cd3963ce8d5-cert\") pod \"frr-k8s-webhook-server-5478bdb765-n7lvn\" (UID: \"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.908495 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkk8j\" (UniqueName: \"kubernetes.io/projected/5267fc88-99fb-48cc-b1eb-3cd3963ce8d5-kube-api-access-fkk8j\") pod \"frr-k8s-webhook-server-5478bdb765-n7lvn\" (UID: \"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.987477 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrzv4\" (UniqueName: \"kubernetes.io/projected/394e51b3-8289-44ef-80b6-f14b9b56b5bd-kube-api-access-jrzv4\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.987531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.987603 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ef59525d-d301-4af1-b9c8-26933453b3d9-metrics-certs\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: E0930 00:25:00.987781 4809 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 00:25:00 crc kubenswrapper[4809]: E0930 00:25:00.987846 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist podName:394e51b3-8289-44ef-80b6-f14b9b56b5bd nodeName:}" failed. No retries permitted until 2025-09-30 00:25:01.487824953 +0000 UTC m=+952.524074361 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist") pod "speaker-p7464" (UID: "394e51b3-8289-44ef-80b6-f14b9b56b5bd") : secret "metallb-memberlist" not found Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.987882 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s6tv\" (UniqueName: \"kubernetes.io/projected/ef59525d-d301-4af1-b9c8-26933453b3d9-kube-api-access-2s6tv\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.987908 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/394e51b3-8289-44ef-80b6-f14b9b56b5bd-metallb-excludel2\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.988771 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/394e51b3-8289-44ef-80b6-f14b9b56b5bd-metallb-excludel2\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.988903 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-metrics-certs\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.989015 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ef59525d-d301-4af1-b9c8-26933453b3d9-cert\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.990839 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ef59525d-d301-4af1-b9c8-26933453b3d9-metrics-certs\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.992175 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/ef59525d-d301-4af1-b9c8-26933453b3d9-cert\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.992229 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-metrics-certs\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:00 crc kubenswrapper[4809]: I0930 00:25:00.995193 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.005144 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s6tv\" (UniqueName: \"kubernetes.io/projected/ef59525d-d301-4af1-b9c8-26933453b3d9-kube-api-access-2s6tv\") pod \"controller-5d688f5ffc-8pcmd\" (UID: \"ef59525d-d301-4af1-b9c8-26933453b3d9\") " pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.007898 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.018053 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrzv4\" (UniqueName: \"kubernetes.io/projected/394e51b3-8289-44ef-80b6-f14b9b56b5bd-kube-api-access-jrzv4\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.089440 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.255988 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"840b16119841ea5b1e7e43aa6f90b95f390d8ae7d6b8fd056f4503a4ec57c475"} Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.392180 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn"] Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.499579 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:01 crc kubenswrapper[4809]: E0930 00:25:01.499784 4809 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 00:25:01 crc kubenswrapper[4809]: E0930 00:25:01.499855 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist podName:394e51b3-8289-44ef-80b6-f14b9b56b5bd nodeName:}" failed. No retries permitted until 2025-09-30 00:25:02.49983699 +0000 UTC m=+953.536086398 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist") pod "speaker-p7464" (UID: "394e51b3-8289-44ef-80b6-f14b9b56b5bd") : secret "metallb-memberlist" not found Sep 30 00:25:01 crc kubenswrapper[4809]: I0930 00:25:01.503184 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-8pcmd"] Sep 30 00:25:01 crc kubenswrapper[4809]: W0930 00:25:01.503578 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef59525d_d301_4af1_b9c8_26933453b3d9.slice/crio-3aa689173411cbb513f2050daeaf3f638fd25a048ed5eb8cb403b036bc27bc8b WatchSource:0}: Error finding container 3aa689173411cbb513f2050daeaf3f638fd25a048ed5eb8cb403b036bc27bc8b: Status 404 returned error can't find the container with id 3aa689173411cbb513f2050daeaf3f638fd25a048ed5eb8cb403b036bc27bc8b Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.262295 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" event={"ID":"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5","Type":"ContainerStarted","Data":"19fed34ad77a1f678d61443fa45c8ac44e92812253aee150086ca0c377acdcac"} Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.264608 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-8pcmd" event={"ID":"ef59525d-d301-4af1-b9c8-26933453b3d9","Type":"ContainerStarted","Data":"4b1df40c9f6ef36d677a37a2bc1a41c099e19d13d1b6c3de8bfc7faf6d7bb0e1"} Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.264735 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-8pcmd" event={"ID":"ef59525d-d301-4af1-b9c8-26933453b3d9","Type":"ContainerStarted","Data":"efbd26a0711f9554a75cc283bee694eb24e8976f4af9969788666d7cf90bac9f"} Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.264811 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-8pcmd" event={"ID":"ef59525d-d301-4af1-b9c8-26933453b3d9","Type":"ContainerStarted","Data":"3aa689173411cbb513f2050daeaf3f638fd25a048ed5eb8cb403b036bc27bc8b"} Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.265929 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.284052 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-8pcmd" podStartSLOduration=2.284035432 podStartE2EDuration="2.284035432s" podCreationTimestamp="2025-09-30 00:25:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:25:02.279958183 +0000 UTC m=+953.316207611" watchObservedRunningTime="2025-09-30 00:25:02.284035432 +0000 UTC m=+953.320284850" Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.515158 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.522705 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/394e51b3-8289-44ef-80b6-f14b9b56b5bd-memberlist\") pod \"speaker-p7464\" (UID: \"394e51b3-8289-44ef-80b6-f14b9b56b5bd\") " pod="metallb-system/speaker-p7464" Sep 30 00:25:02 crc kubenswrapper[4809]: I0930 00:25:02.566946 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-p7464" Sep 30 00:25:02 crc kubenswrapper[4809]: W0930 00:25:02.586846 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod394e51b3_8289_44ef_80b6_f14b9b56b5bd.slice/crio-e2aa31488d477ce84e95f4407132f45089964938e8e0e973b144015f1d90f282 WatchSource:0}: Error finding container e2aa31488d477ce84e95f4407132f45089964938e8e0e973b144015f1d90f282: Status 404 returned error can't find the container with id e2aa31488d477ce84e95f4407132f45089964938e8e0e973b144015f1d90f282 Sep 30 00:25:03 crc kubenswrapper[4809]: I0930 00:25:03.285045 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-p7464" event={"ID":"394e51b3-8289-44ef-80b6-f14b9b56b5bd","Type":"ContainerStarted","Data":"c57eeae0a4e820d5f1c86f96302dbcdd2db6d65e35610ddb982456fdc14e89db"} Sep 30 00:25:03 crc kubenswrapper[4809]: I0930 00:25:03.285350 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-p7464" event={"ID":"394e51b3-8289-44ef-80b6-f14b9b56b5bd","Type":"ContainerStarted","Data":"0ce77170278f438792f504baa26e47f21f47aca1fc3229c5a22ebfbee27dbc7a"} Sep 30 00:25:03 crc kubenswrapper[4809]: I0930 00:25:03.285361 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-p7464" event={"ID":"394e51b3-8289-44ef-80b6-f14b9b56b5bd","Type":"ContainerStarted","Data":"e2aa31488d477ce84e95f4407132f45089964938e8e0e973b144015f1d90f282"} Sep 30 00:25:03 crc kubenswrapper[4809]: I0930 00:25:03.285582 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-p7464" Sep 30 00:25:03 crc kubenswrapper[4809]: I0930 00:25:03.304632 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-p7464" podStartSLOduration=3.304612723 podStartE2EDuration="3.304612723s" podCreationTimestamp="2025-09-30 00:25:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:25:03.301571082 +0000 UTC m=+954.337820510" watchObservedRunningTime="2025-09-30 00:25:03.304612723 +0000 UTC m=+954.340862131" Sep 30 00:25:09 crc kubenswrapper[4809]: I0930 00:25:09.331293 4809 generic.go:334] "Generic (PLEG): container finished" podID="be322c70-01b7-420f-b3bb-3f77a839930f" containerID="bab608a140789954e6520cddc156a790b5f3f1a646cb9edd90a42a7351e7e248" exitCode=0 Sep 30 00:25:09 crc kubenswrapper[4809]: I0930 00:25:09.331529 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerDied","Data":"bab608a140789954e6520cddc156a790b5f3f1a646cb9edd90a42a7351e7e248"} Sep 30 00:25:09 crc kubenswrapper[4809]: I0930 00:25:09.333553 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" event={"ID":"5267fc88-99fb-48cc-b1eb-3cd3963ce8d5","Type":"ContainerStarted","Data":"0e8a1d2501d2fa65dea616795b8527da66bd583b48eceb0ab928f261ab885d7d"} Sep 30 00:25:09 crc kubenswrapper[4809]: I0930 00:25:09.333797 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:09 crc kubenswrapper[4809]: I0930 00:25:09.400277 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" podStartSLOduration=2.239091302 podStartE2EDuration="9.400260701s" podCreationTimestamp="2025-09-30 00:25:00 +0000 UTC" firstStartedPulling="2025-09-30 00:25:01.40208759 +0000 UTC m=+952.438336998" lastFinishedPulling="2025-09-30 00:25:08.563256989 +0000 UTC m=+959.599506397" observedRunningTime="2025-09-30 00:25:09.399048228 +0000 UTC m=+960.435297676" watchObservedRunningTime="2025-09-30 00:25:09.400260701 +0000 UTC m=+960.436510109" Sep 30 00:25:10 crc kubenswrapper[4809]: I0930 00:25:10.342522 4809 generic.go:334] "Generic (PLEG): container finished" podID="be322c70-01b7-420f-b3bb-3f77a839930f" containerID="a99187b2caac5b5c643e84d04bda69afdae309acca71d0419c824fa2d0b3f492" exitCode=0 Sep 30 00:25:10 crc kubenswrapper[4809]: I0930 00:25:10.342601 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerDied","Data":"a99187b2caac5b5c643e84d04bda69afdae309acca71d0419c824fa2d0b3f492"} Sep 30 00:25:11 crc kubenswrapper[4809]: I0930 00:25:11.093802 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-8pcmd" Sep 30 00:25:11 crc kubenswrapper[4809]: I0930 00:25:11.350728 4809 generic.go:334] "Generic (PLEG): container finished" podID="be322c70-01b7-420f-b3bb-3f77a839930f" containerID="e20bc0a7c489160491726fe2b05afc0b8394930e3954186b1bb65fd846f165a4" exitCode=0 Sep 30 00:25:11 crc kubenswrapper[4809]: I0930 00:25:11.350764 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerDied","Data":"e20bc0a7c489160491726fe2b05afc0b8394930e3954186b1bb65fd846f165a4"} Sep 30 00:25:12 crc kubenswrapper[4809]: I0930 00:25:12.371282 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"0c20727d8fe9fac55c4ec63995126d0baa8fcf1fd6866e66b6e1d92787205c2b"} Sep 30 00:25:12 crc kubenswrapper[4809]: I0930 00:25:12.371334 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"6422d6bffac0e78b10926485c2061d9251f97262d5d8bbde9bb77d259e94ce46"} Sep 30 00:25:12 crc kubenswrapper[4809]: I0930 00:25:12.371348 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"ce7b1685e7edab5dd92133c5654c5e85e7b6217afc146f5aee90cad7b3ca3977"} Sep 30 00:25:12 crc kubenswrapper[4809]: I0930 00:25:12.371362 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"313586542fb63c2d28013b196a86157630dc0590dd3be5fc41e6b101e6c55231"} Sep 30 00:25:12 crc kubenswrapper[4809]: I0930 00:25:12.371375 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"b52e059507445b9d4a524412a5238c66f56f67c0c18756843b8f249919d637c7"} Sep 30 00:25:12 crc 
kubenswrapper[4809]: I0930 00:25:12.571097 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-p7464" Sep 30 00:25:13 crc kubenswrapper[4809]: I0930 00:25:13.384197 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4wd69" event={"ID":"be322c70-01b7-420f-b3bb-3f77a839930f","Type":"ContainerStarted","Data":"d8c8e31e804735c03eae329a7841baaf613d36baeca650bab0d17286930c2cde"} Sep 30 00:25:13 crc kubenswrapper[4809]: I0930 00:25:13.384583 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:13 crc kubenswrapper[4809]: I0930 00:25:13.405670 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4wd69" podStartSLOduration=6.037363777 podStartE2EDuration="13.405647974s" podCreationTimestamp="2025-09-30 00:25:00 +0000 UTC" firstStartedPulling="2025-09-30 00:25:01.17054832 +0000 UTC m=+952.206797728" lastFinishedPulling="2025-09-30 00:25:08.538832507 +0000 UTC m=+959.575081925" observedRunningTime="2025-09-30 00:25:13.403199679 +0000 UTC m=+964.439449107" watchObservedRunningTime="2025-09-30 00:25:13.405647974 +0000 UTC m=+964.441897382" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.725636 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-x2gp6"] Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.727321 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.730999 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.731069 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-6kvg9" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.731595 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.770737 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-x2gp6"] Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.834533 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlkpq\" (UniqueName: \"kubernetes.io/projected/d4b2e117-88d2-4215-9959-12d8b0bc9544-kube-api-access-dlkpq\") pod \"openstack-operator-index-x2gp6\" (UID: \"d4b2e117-88d2-4215-9959-12d8b0bc9544\") " pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.937248 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlkpq\" (UniqueName: \"kubernetes.io/projected/d4b2e117-88d2-4215-9959-12d8b0bc9544-kube-api-access-dlkpq\") pod \"openstack-operator-index-x2gp6\" (UID: \"d4b2e117-88d2-4215-9959-12d8b0bc9544\") " pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:15 crc kubenswrapper[4809]: I0930 00:25:15.955378 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlkpq\" (UniqueName: \"kubernetes.io/projected/d4b2e117-88d2-4215-9959-12d8b0bc9544-kube-api-access-dlkpq\") pod \"openstack-operator-index-x2gp6\" (UID: \"d4b2e117-88d2-4215-9959-12d8b0bc9544\") " 
pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:16 crc kubenswrapper[4809]: I0930 00:25:16.009389 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:16 crc kubenswrapper[4809]: I0930 00:25:16.057157 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:16 crc kubenswrapper[4809]: I0930 00:25:16.057926 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:16 crc kubenswrapper[4809]: I0930 00:25:16.528548 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-x2gp6"] Sep 30 00:25:17 crc kubenswrapper[4809]: I0930 00:25:17.418362 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x2gp6" event={"ID":"d4b2e117-88d2-4215-9959-12d8b0bc9544","Type":"ContainerStarted","Data":"630ebf3a0fb708ebd61235db67db2c3e3760034f932043d62bc19cab0b86ff15"} Sep 30 00:25:17 crc kubenswrapper[4809]: I0930 00:25:17.886883 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-x2gp6"] Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.297936 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ftl9q"] Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.299559 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.308151 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ftl9q"] Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.477969 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlzd5\" (UniqueName: \"kubernetes.io/projected/43194d20-0560-4090-b0f5-2a32b11760db-kube-api-access-rlzd5\") pod \"openstack-operator-index-ftl9q\" (UID: \"43194d20-0560-4090-b0f5-2a32b11760db\") " pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.579691 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlzd5\" (UniqueName: \"kubernetes.io/projected/43194d20-0560-4090-b0f5-2a32b11760db-kube-api-access-rlzd5\") pod \"openstack-operator-index-ftl9q\" (UID: \"43194d20-0560-4090-b0f5-2a32b11760db\") " pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.598507 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlzd5\" (UniqueName: \"kubernetes.io/projected/43194d20-0560-4090-b0f5-2a32b11760db-kube-api-access-rlzd5\") pod \"openstack-operator-index-ftl9q\" (UID: \"43194d20-0560-4090-b0f5-2a32b11760db\") " pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:18 crc kubenswrapper[4809]: I0930 00:25:18.625531 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:19 crc kubenswrapper[4809]: I0930 00:25:19.274418 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ftl9q"] Sep 30 00:25:19 crc kubenswrapper[4809]: W0930 00:25:19.284500 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43194d20_0560_4090_b0f5_2a32b11760db.slice/crio-a36e418f7aa60bcb1427d54b8557537463b9922d18e07c19b149e445ef844589 WatchSource:0}: Error finding container a36e418f7aa60bcb1427d54b8557537463b9922d18e07c19b149e445ef844589: Status 404 returned error can't find the container with id a36e418f7aa60bcb1427d54b8557537463b9922d18e07c19b149e445ef844589 Sep 30 00:25:19 crc kubenswrapper[4809]: I0930 00:25:19.433394 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ftl9q" event={"ID":"43194d20-0560-4090-b0f5-2a32b11760db","Type":"ContainerStarted","Data":"a36e418f7aa60bcb1427d54b8557537463b9922d18e07c19b149e445ef844589"} Sep 30 00:25:19 crc kubenswrapper[4809]: I0930 00:25:19.435210 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x2gp6" event={"ID":"d4b2e117-88d2-4215-9959-12d8b0bc9544","Type":"ContainerStarted","Data":"fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923"} Sep 30 00:25:19 crc kubenswrapper[4809]: I0930 00:25:19.435318 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-x2gp6" podUID="d4b2e117-88d2-4215-9959-12d8b0bc9544" containerName="registry-server" containerID="cri-o://fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923" gracePeriod=2 Sep 30 00:25:19 crc kubenswrapper[4809]: I0930 00:25:19.453539 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-x2gp6" podStartSLOduration=2.106497918 podStartE2EDuration="4.453517246s" podCreationTimestamp="2025-09-30 00:25:15 +0000 UTC" firstStartedPulling="2025-09-30 00:25:16.543578633 +0000 UTC m=+967.579828041" lastFinishedPulling="2025-09-30 00:25:18.890597971 +0000 UTC m=+969.926847369" observedRunningTime="2025-09-30 00:25:19.449909699 +0000 UTC m=+970.486159107" watchObservedRunningTime="2025-09-30 00:25:19.453517246 +0000 UTC m=+970.489766654" Sep 30 00:25:19 crc kubenswrapper[4809]: I0930 00:25:19.897524 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.014043 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlkpq\" (UniqueName: \"kubernetes.io/projected/d4b2e117-88d2-4215-9959-12d8b0bc9544-kube-api-access-dlkpq\") pod \"d4b2e117-88d2-4215-9959-12d8b0bc9544\" (UID: \"d4b2e117-88d2-4215-9959-12d8b0bc9544\") " Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.020811 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b2e117-88d2-4215-9959-12d8b0bc9544-kube-api-access-dlkpq" (OuterVolumeSpecName: "kube-api-access-dlkpq") pod "d4b2e117-88d2-4215-9959-12d8b0bc9544" (UID: "d4b2e117-88d2-4215-9959-12d8b0bc9544"). InnerVolumeSpecName "kube-api-access-dlkpq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.116108 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlkpq\" (UniqueName: \"kubernetes.io/projected/d4b2e117-88d2-4215-9959-12d8b0bc9544-kube-api-access-dlkpq\") on node \"crc\" DevicePath \"\"" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.446052 4809 generic.go:334] "Generic (PLEG): container finished" podID="d4b2e117-88d2-4215-9959-12d8b0bc9544" containerID="fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923" exitCode=0 Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.446105 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x2gp6" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.446179 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x2gp6" event={"ID":"d4b2e117-88d2-4215-9959-12d8b0bc9544","Type":"ContainerDied","Data":"fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923"} Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.446250 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x2gp6" event={"ID":"d4b2e117-88d2-4215-9959-12d8b0bc9544","Type":"ContainerDied","Data":"630ebf3a0fb708ebd61235db67db2c3e3760034f932043d62bc19cab0b86ff15"} Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.446273 4809 scope.go:117] "RemoveContainer" containerID="fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.449298 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ftl9q" event={"ID":"43194d20-0560-4090-b0f5-2a32b11760db","Type":"ContainerStarted","Data":"d72b52f5149533faa18549c58c7cb6b805ba0950b96ead5a29015482599ba1be"} Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.478015 4809 scope.go:117] "RemoveContainer" containerID="fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923" Sep 30 00:25:20 crc kubenswrapper[4809]: E0930 00:25:20.478871 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923\": container with ID starting with fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923 not found: ID does not exist" containerID="fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.478911 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923"} err="failed to get container status \"fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923\": rpc error: code = NotFound desc = could not find container \"fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923\": container with ID starting with fcd073eb8749a886ba83fddee60e61a6ca88851ccba23bb807ff92b5c79cf923 not found: ID does not exist" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.493488 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ftl9q" podStartSLOduration=2.447318173 podStartE2EDuration="2.493468945s" podCreationTimestamp="2025-09-30 00:25:18 +0000 UTC" firstStartedPulling="2025-09-30 00:25:19.289669472 
+0000 UTC m=+970.325918900" lastFinishedPulling="2025-09-30 00:25:19.335820244 +0000 UTC m=+970.372069672" observedRunningTime="2025-09-30 00:25:20.469910786 +0000 UTC m=+971.506160234" watchObservedRunningTime="2025-09-30 00:25:20.493468945 +0000 UTC m=+971.529718353" Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.494474 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-x2gp6"] Sep 30 00:25:20 crc kubenswrapper[4809]: I0930 00:25:20.500059 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-x2gp6"] Sep 30 00:25:21 crc kubenswrapper[4809]: I0930 00:25:21.003530 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-n7lvn" Sep 30 00:25:21 crc kubenswrapper[4809]: I0930 00:25:21.017530 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4wd69" Sep 30 00:25:21 crc kubenswrapper[4809]: I0930 00:25:21.706394 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4b2e117-88d2-4215-9959-12d8b0bc9544" path="/var/lib/kubelet/pods/d4b2e117-88d2-4215-9959-12d8b0bc9544/volumes" Sep 30 00:25:28 crc kubenswrapper[4809]: I0930 00:25:28.625984 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:28 crc kubenswrapper[4809]: I0930 00:25:28.626387 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:28 crc kubenswrapper[4809]: I0930 00:25:28.661085 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:29 crc kubenswrapper[4809]: I0930 00:25:29.558691 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-ftl9q" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.162697 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx"] Sep 30 00:25:35 crc kubenswrapper[4809]: E0930 00:25:35.164541 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4b2e117-88d2-4215-9959-12d8b0bc9544" containerName="registry-server" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.164667 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4b2e117-88d2-4215-9959-12d8b0bc9544" containerName="registry-server" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.164938 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4b2e117-88d2-4215-9959-12d8b0bc9544" containerName="registry-server" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.166382 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.169417 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-f96nd" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.186230 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx"] Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.268830 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-util\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.268872 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62frf\" (UniqueName: \"kubernetes.io/projected/ed080f95-063a-4f19-bb10-7153531ca913-kube-api-access-62frf\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.268993 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-bundle\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.371716 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-bundle\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.371871 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-util\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.371924 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62frf\" (UniqueName: \"kubernetes.io/projected/ed080f95-063a-4f19-bb10-7153531ca913-kube-api-access-62frf\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.372623 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-bundle\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.372753 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-util\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.411605 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62frf\" (UniqueName: \"kubernetes.io/projected/ed080f95-063a-4f19-bb10-7153531ca913-kube-api-access-62frf\") pod \"67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.484382 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:35 crc kubenswrapper[4809]: I0930 00:25:35.901686 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx"] Sep 30 00:25:36 crc kubenswrapper[4809]: I0930 00:25:36.590118 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed080f95-063a-4f19-bb10-7153531ca913" containerID="e8c012f0eaf3dc9217d748fbc1110763061bcca6caf8e6a1e0265e853f910486" exitCode=0 Sep 30 00:25:36 crc kubenswrapper[4809]: I0930 00:25:36.590233 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" event={"ID":"ed080f95-063a-4f19-bb10-7153531ca913","Type":"ContainerDied","Data":"e8c012f0eaf3dc9217d748fbc1110763061bcca6caf8e6a1e0265e853f910486"} Sep 30 00:25:36 crc kubenswrapper[4809]: I0930 00:25:36.590399 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" event={"ID":"ed080f95-063a-4f19-bb10-7153531ca913","Type":"ContainerStarted","Data":"9cd2c13777b9c88000b9c5cc42e1977505efa9bad99491a4d1156358c9f7af62"} Sep 30 00:25:37 crc kubenswrapper[4809]: I0930 00:25:37.602936 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed080f95-063a-4f19-bb10-7153531ca913" containerID="eeba5d9757f6595e4d07ec9ef74eb569cff9485d88cbbc20b2eac718c6e9b6c4" exitCode=0 Sep 30 00:25:37 crc kubenswrapper[4809]: I0930 00:25:37.603286 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" event={"ID":"ed080f95-063a-4f19-bb10-7153531ca913","Type":"ContainerDied","Data":"eeba5d9757f6595e4d07ec9ef74eb569cff9485d88cbbc20b2eac718c6e9b6c4"} Sep 30 00:25:38 crc kubenswrapper[4809]: I0930 00:25:38.618180 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed080f95-063a-4f19-bb10-7153531ca913" containerID="e088685fc660531f21171884a420dbbdf79db8708ea2180098767cfb6a61941b" exitCode=0 Sep 30 00:25:38 crc kubenswrapper[4809]: I0930 00:25:38.618248 4809 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" event={"ID":"ed080f95-063a-4f19-bb10-7153531ca913","Type":"ContainerDied","Data":"e088685fc660531f21171884a420dbbdf79db8708ea2180098767cfb6a61941b"} Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.000578 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.080592 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-util\") pod \"ed080f95-063a-4f19-bb10-7153531ca913\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.080626 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62frf\" (UniqueName: \"kubernetes.io/projected/ed080f95-063a-4f19-bb10-7153531ca913-kube-api-access-62frf\") pod \"ed080f95-063a-4f19-bb10-7153531ca913\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.080662 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-bundle\") pod \"ed080f95-063a-4f19-bb10-7153531ca913\" (UID: \"ed080f95-063a-4f19-bb10-7153531ca913\") " Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.081516 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-bundle" (OuterVolumeSpecName: "bundle") pod "ed080f95-063a-4f19-bb10-7153531ca913" (UID: "ed080f95-063a-4f19-bb10-7153531ca913"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.087286 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed080f95-063a-4f19-bb10-7153531ca913-kube-api-access-62frf" (OuterVolumeSpecName: "kube-api-access-62frf") pod "ed080f95-063a-4f19-bb10-7153531ca913" (UID: "ed080f95-063a-4f19-bb10-7153531ca913"). InnerVolumeSpecName "kube-api-access-62frf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.114896 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-util" (OuterVolumeSpecName: "util") pod "ed080f95-063a-4f19-bb10-7153531ca913" (UID: "ed080f95-063a-4f19-bb10-7153531ca913"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.182550 4809 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.183012 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62frf\" (UniqueName: \"kubernetes.io/projected/ed080f95-063a-4f19-bb10-7153531ca913-kube-api-access-62frf\") on node \"crc\" DevicePath \"\"" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.183061 4809 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ed080f95-063a-4f19-bb10-7153531ca913-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.638375 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" event={"ID":"ed080f95-063a-4f19-bb10-7153531ca913","Type":"ContainerDied","Data":"9cd2c13777b9c88000b9c5cc42e1977505efa9bad99491a4d1156358c9f7af62"} Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.638423 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cd2c13777b9c88000b9c5cc42e1977505efa9bad99491a4d1156358c9f7af62" Sep 30 00:25:40 crc kubenswrapper[4809]: I0930 00:25:40.638739 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.550218 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq"] Sep 30 00:25:59 crc kubenswrapper[4809]: E0930 00:25:59.551032 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="extract" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.551048 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="extract" Sep 30 00:25:59 crc kubenswrapper[4809]: E0930 00:25:59.551057 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="util" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.551063 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="util" Sep 30 00:25:59 crc kubenswrapper[4809]: E0930 00:25:59.551077 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="pull" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.551083 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="pull" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.551201 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed080f95-063a-4f19-bb10-7153531ca913" containerName="extract" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.551936 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.554087 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qxskb" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.577765 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq"] Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.636841 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgmng\" (UniqueName: \"kubernetes.io/projected/9e4a0ee0-13ef-4f72-839c-da921dc76067-kube-api-access-qgmng\") pod \"openstack-operator-controller-operator-6cb7c558-p8rpq\" (UID: \"9e4a0ee0-13ef-4f72-839c-da921dc76067\") " pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.743497 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgmng\" (UniqueName: \"kubernetes.io/projected/9e4a0ee0-13ef-4f72-839c-da921dc76067-kube-api-access-qgmng\") pod \"openstack-operator-controller-operator-6cb7c558-p8rpq\" (UID: \"9e4a0ee0-13ef-4f72-839c-da921dc76067\") " pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.778741 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgmng\" (UniqueName: \"kubernetes.io/projected/9e4a0ee0-13ef-4f72-839c-da921dc76067-kube-api-access-qgmng\") pod \"openstack-operator-controller-operator-6cb7c558-p8rpq\" (UID: \"9e4a0ee0-13ef-4f72-839c-da921dc76067\") " pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:25:59 crc kubenswrapper[4809]: I0930 00:25:59.920256 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:26:00 crc kubenswrapper[4809]: I0930 00:26:00.359231 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq"] Sep 30 00:26:00 crc kubenswrapper[4809]: I0930 00:26:00.817046 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" event={"ID":"9e4a0ee0-13ef-4f72-839c-da921dc76067","Type":"ContainerStarted","Data":"0d82076d69a4891bed5e73aea8a8361093881fb6832929e0d1da66c6f6e5e79a"} Sep 30 00:26:04 crc kubenswrapper[4809]: I0930 00:26:04.850610 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" event={"ID":"9e4a0ee0-13ef-4f72-839c-da921dc76067","Type":"ContainerStarted","Data":"e787680e310dd42875e4d2723aea65cf35ef416778415c087da2dded22d1115d"} Sep 30 00:26:06 crc kubenswrapper[4809]: I0930 00:26:06.875107 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" event={"ID":"9e4a0ee0-13ef-4f72-839c-da921dc76067","Type":"ContainerStarted","Data":"53b894909a9ae56f6bcd501861c720e4ee73ff37a37dda625b78c4a0f8750493"} Sep 30 00:26:06 crc kubenswrapper[4809]: I0930 00:26:06.875869 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:26:06 crc kubenswrapper[4809]: I0930 00:26:06.911726 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" podStartSLOduration=2.158706882 podStartE2EDuration="7.911703672s" podCreationTimestamp="2025-09-30 00:25:59 +0000 UTC" firstStartedPulling="2025-09-30 00:26:00.371384096 +0000 UTC m=+1011.407633504" lastFinishedPulling="2025-09-30 00:26:06.124380846 +0000 UTC m=+1017.160630294" observedRunningTime="2025-09-30 00:26:06.90790462 +0000 UTC m=+1017.944154048" watchObservedRunningTime="2025-09-30 00:26:06.911703672 +0000 UTC m=+1017.947953100" Sep 30 00:26:09 crc kubenswrapper[4809]: I0930 00:26:09.924439 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6cb7c558-p8rpq" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.100945 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.102973 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.105481 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-9nzfg" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.119447 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.121660 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.124661 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-wckmt" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.130454 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.131747 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.133813 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-8hwwv" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.145079 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.163275 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.172245 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.173352 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.178024 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-bmwsn" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.185402 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.211500 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.241024 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46w9c\" (UniqueName: \"kubernetes.io/projected/6306e69b-dd84-4e09-b462-71a7f858d351-kube-api-access-46w9c\") pod \"cinder-operator-controller-manager-644bddb6d8-2jhx6\" (UID: \"6306e69b-dd84-4e09-b462-71a7f858d351\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.246199 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvzfn\" (UniqueName: \"kubernetes.io/projected/e5433da0-c4a6-46a5-b936-bf361aca6946-kube-api-access-vvzfn\") pod \"barbican-operator-controller-manager-6ff8b75857-4tp9f\" (UID: \"e5433da0-c4a6-46a5-b936-bf361aca6946\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.272938 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 
00:26:35.274345 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.280918 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-7v2v7" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.295806 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.297266 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.309416 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-kc5jk" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.335771 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.347572 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvzfn\" (UniqueName: \"kubernetes.io/projected/e5433da0-c4a6-46a5-b936-bf361aca6946-kube-api-access-vvzfn\") pod \"barbican-operator-controller-manager-6ff8b75857-4tp9f\" (UID: \"e5433da0-c4a6-46a5-b936-bf361aca6946\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.347634 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl2lf\" (UniqueName: \"kubernetes.io/projected/0123e31b-cbb3-4545-b679-c7b27eeaebba-kube-api-access-zl2lf\") pod \"horizon-operator-controller-manager-9f4696d94-v7sp5\" (UID: \"0123e31b-cbb3-4545-b679-c7b27eeaebba\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.347690 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46w9c\" (UniqueName: \"kubernetes.io/projected/6306e69b-dd84-4e09-b462-71a7f858d351-kube-api-access-46w9c\") pod \"cinder-operator-controller-manager-644bddb6d8-2jhx6\" (UID: \"6306e69b-dd84-4e09-b462-71a7f858d351\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.347742 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncgqf\" (UniqueName: \"kubernetes.io/projected/11ecdd7b-edcc-44fd-9a10-ab2f58a40430-kube-api-access-ncgqf\") pod \"glance-operator-controller-manager-84958c4d49-9dbvw\" (UID: \"11ecdd7b-edcc-44fd-9a10-ab2f58a40430\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.347794 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29c6j\" (UniqueName: \"kubernetes.io/projected/2a80b8c2-74ea-4a89-a241-026ffb8c2b1a-kube-api-access-29c6j\") pod \"heat-operator-controller-manager-5d889d78cf-jbqng\" (UID: \"2a80b8c2-74ea-4a89-a241-026ffb8c2b1a\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:35 crc 
kubenswrapper[4809]: I0930 00:26:35.347824 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vwc2\" (UniqueName: \"kubernetes.io/projected/70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb-kube-api-access-9vwc2\") pod \"designate-operator-controller-manager-84f4f7b77b-6qsgb\" (UID: \"70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.353723 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.382722 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.384451 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.385137 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvzfn\" (UniqueName: \"kubernetes.io/projected/e5433da0-c4a6-46a5-b936-bf361aca6946-kube-api-access-vvzfn\") pod \"barbican-operator-controller-manager-6ff8b75857-4tp9f\" (UID: \"e5433da0-c4a6-46a5-b936-bf361aca6946\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.389978 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-8w7dx" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.407081 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.408382 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.410219 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.410540 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mdmqm" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.414221 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46w9c\" (UniqueName: \"kubernetes.io/projected/6306e69b-dd84-4e09-b462-71a7f858d351-kube-api-access-46w9c\") pod \"cinder-operator-controller-manager-644bddb6d8-2jhx6\" (UID: \"6306e69b-dd84-4e09-b462-71a7f858d351\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.425336 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.426585 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.426923 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.432031 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6fp87" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.441600 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.443078 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450145 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncgqf\" (UniqueName: \"kubernetes.io/projected/11ecdd7b-edcc-44fd-9a10-ab2f58a40430-kube-api-access-ncgqf\") pod \"glance-operator-controller-manager-84958c4d49-9dbvw\" (UID: \"11ecdd7b-edcc-44fd-9a10-ab2f58a40430\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450184 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjn4f\" (UniqueName: \"kubernetes.io/projected/904e7639-6336-4012-a8f3-2b6e9c134a1f-kube-api-access-mjn4f\") pod \"keystone-operator-controller-manager-5bd55b4bff-s8j8c\" (UID: \"904e7639-6336-4012-a8f3-2b6e9c134a1f\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450231 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29c6j\" (UniqueName: \"kubernetes.io/projected/2a80b8c2-74ea-4a89-a241-026ffb8c2b1a-kube-api-access-29c6j\") pod \"heat-operator-controller-manager-5d889d78cf-jbqng\" (UID: \"2a80b8c2-74ea-4a89-a241-026ffb8c2b1a\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450251 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vwc2\" (UniqueName: \"kubernetes.io/projected/70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb-kube-api-access-9vwc2\") pod \"designate-operator-controller-manager-84f4f7b77b-6qsgb\" (UID: \"70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450320 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-cert\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450337 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwj98\" (UniqueName: \"kubernetes.io/projected/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-kube-api-access-gwj98\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 
00:26:35.450363 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl2lf\" (UniqueName: \"kubernetes.io/projected/0123e31b-cbb3-4545-b679-c7b27eeaebba-kube-api-access-zl2lf\") pod \"horizon-operator-controller-manager-9f4696d94-v7sp5\" (UID: \"0123e31b-cbb3-4545-b679-c7b27eeaebba\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.450383 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmcj4\" (UniqueName: \"kubernetes.io/projected/a44af2a0-4faa-4b9d-a426-f137586b0844-kube-api-access-bmcj4\") pod \"ironic-operator-controller-manager-7975b88857-njdxf\" (UID: \"a44af2a0-4faa-4b9d-a426-f137586b0844\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.457604 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.475018 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.502575 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.518277 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.504836 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncgqf\" (UniqueName: \"kubernetes.io/projected/11ecdd7b-edcc-44fd-9a10-ab2f58a40430-kube-api-access-ncgqf\") pod \"glance-operator-controller-manager-84958c4d49-9dbvw\" (UID: \"11ecdd7b-edcc-44fd-9a10-ab2f58a40430\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.516389 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vwc2\" (UniqueName: \"kubernetes.io/projected/70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb-kube-api-access-9vwc2\") pod \"designate-operator-controller-manager-84f4f7b77b-6qsgb\" (UID: \"70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.527456 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29c6j\" (UniqueName: \"kubernetes.io/projected/2a80b8c2-74ea-4a89-a241-026ffb8c2b1a-kube-api-access-29c6j\") pod \"heat-operator-controller-manager-5d889d78cf-jbqng\" (UID: \"2a80b8c2-74ea-4a89-a241-026ffb8c2b1a\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.527536 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.532329 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-qblrg" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.542519 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zl2lf\" (UniqueName: \"kubernetes.io/projected/0123e31b-cbb3-4545-b679-c7b27eeaebba-kube-api-access-zl2lf\") pod \"horizon-operator-controller-manager-9f4696d94-v7sp5\" (UID: \"0123e31b-cbb3-4545-b679-c7b27eeaebba\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.548228 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.548357 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.551892 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjx8m\" (UniqueName: \"kubernetes.io/projected/4a314eca-cb77-46bf-bebb-0a2b6910259d-kube-api-access-gjx8m\") pod \"mariadb-operator-controller-manager-88c7-5fpgn\" (UID: \"4a314eca-cb77-46bf-bebb-0a2b6910259d\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.551949 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-cert\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.551967 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwj98\" (UniqueName: \"kubernetes.io/projected/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-kube-api-access-gwj98\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.551991 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmcj4\" (UniqueName: \"kubernetes.io/projected/a44af2a0-4faa-4b9d-a426-f137586b0844-kube-api-access-bmcj4\") pod \"ironic-operator-controller-manager-7975b88857-njdxf\" (UID: \"a44af2a0-4faa-4b9d-a426-f137586b0844\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.552025 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjn4f\" (UniqueName: \"kubernetes.io/projected/904e7639-6336-4012-a8f3-2b6e9c134a1f-kube-api-access-mjn4f\") pod \"keystone-operator-controller-manager-5bd55b4bff-s8j8c\" (UID: \"904e7639-6336-4012-a8f3-2b6e9c134a1f\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.552044 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22b2p\" (UniqueName: \"kubernetes.io/projected/8817a592-d1b1-4695-b0fe-71b93371e5ea-kube-api-access-22b2p\") pod \"manila-operator-controller-manager-6d68dbc695-7t8jm\" (UID: \"8817a592-d1b1-4695-b0fe-71b93371e5ea\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:35 crc kubenswrapper[4809]: 
E0930 00:26:35.552195 4809 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 30 00:26:35 crc kubenswrapper[4809]: E0930 00:26:35.552240 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-cert podName:aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b nodeName:}" failed. No retries permitted until 2025-09-30 00:26:36.052222065 +0000 UTC m=+1047.088471473 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-cert") pod "infra-operator-controller-manager-7d857cc749-jf4kc" (UID: "aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b") : secret "infra-operator-webhook-server-cert" not found Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.552775 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-gtbqz" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.580720 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.586488 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwj98\" (UniqueName: \"kubernetes.io/projected/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-kube-api-access-gwj98\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.588942 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjn4f\" (UniqueName: \"kubernetes.io/projected/904e7639-6336-4012-a8f3-2b6e9c134a1f-kube-api-access-mjn4f\") pod \"keystone-operator-controller-manager-5bd55b4bff-s8j8c\" (UID: \"904e7639-6336-4012-a8f3-2b6e9c134a1f\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.596341 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmcj4\" (UniqueName: \"kubernetes.io/projected/a44af2a0-4faa-4b9d-a426-f137586b0844-kube-api-access-bmcj4\") pod \"ironic-operator-controller-manager-7975b88857-njdxf\" (UID: \"a44af2a0-4faa-4b9d-a426-f137586b0844\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.600443 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.620942 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.636264 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.645985 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.653788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjx8m\" (UniqueName: \"kubernetes.io/projected/4a314eca-cb77-46bf-bebb-0a2b6910259d-kube-api-access-gjx8m\") pod \"mariadb-operator-controller-manager-88c7-5fpgn\" (UID: \"4a314eca-cb77-46bf-bebb-0a2b6910259d\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.653934 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22b2p\" (UniqueName: \"kubernetes.io/projected/8817a592-d1b1-4695-b0fe-71b93371e5ea-kube-api-access-22b2p\") pod \"manila-operator-controller-manager-6d68dbc695-7t8jm\" (UID: \"8817a592-d1b1-4695-b0fe-71b93371e5ea\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.672478 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.673981 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.680576 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-mjbkh" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.683956 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjx8m\" (UniqueName: \"kubernetes.io/projected/4a314eca-cb77-46bf-bebb-0a2b6910259d-kube-api-access-gjx8m\") pod \"mariadb-operator-controller-manager-88c7-5fpgn\" (UID: \"4a314eca-cb77-46bf-bebb-0a2b6910259d\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.696088 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.697989 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.704867 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.705811 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-t4r8f" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.713740 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22b2p\" (UniqueName: \"kubernetes.io/projected/8817a592-d1b1-4695-b0fe-71b93371e5ea-kube-api-access-22b2p\") pod \"manila-operator-controller-manager-6d68dbc695-7t8jm\" (UID: \"8817a592-d1b1-4695-b0fe-71b93371e5ea\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.734443 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.734483 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.740608 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.742082 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.745367 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-wwkw9" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.763706 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.764614 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.764985 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-744fl\" (UniqueName: \"kubernetes.io/projected/d584c96f-2058-47bb-a205-672cebe71309-kube-api-access-744fl\") pod \"nova-operator-controller-manager-c7c776c96-p85rd\" (UID: \"d584c96f-2058-47bb-a205-672cebe71309\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.765129 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnlhx\" (UniqueName: \"kubernetes.io/projected/d5b3350c-20ba-4b71-9d17-837992ec8740-kube-api-access-hnlhx\") pod \"octavia-operator-controller-manager-76fcc6dc7c-tqr46\" (UID: \"d5b3350c-20ba-4b71-9d17-837992ec8740\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.765172 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgvfb\" (UniqueName: \"kubernetes.io/projected/892fd48f-a2bf-4c2e-8167-1259635511a2-kube-api-access-vgvfb\") pod \"neutron-operator-controller-manager-64d7b59854-xcmjw\" (UID: \"892fd48f-a2bf-4c2e-8167-1259635511a2\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.781197 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.782754 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.790475 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-f4wfr" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.790491 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.797958 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.802300 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.809842 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.810429 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-ll8nt" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.823901 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.840335 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.849579 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-lsnft" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.903247 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnlhx\" (UniqueName: \"kubernetes.io/projected/d5b3350c-20ba-4b71-9d17-837992ec8740-kube-api-access-hnlhx\") pod \"octavia-operator-controller-manager-76fcc6dc7c-tqr46\" (UID: \"d5b3350c-20ba-4b71-9d17-837992ec8740\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.903303 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgvfb\" (UniqueName: \"kubernetes.io/projected/892fd48f-a2bf-4c2e-8167-1259635511a2-kube-api-access-vgvfb\") pod \"neutron-operator-controller-manager-64d7b59854-xcmjw\" (UID: \"892fd48f-a2bf-4c2e-8167-1259635511a2\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.903461 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-744fl\" (UniqueName: \"kubernetes.io/projected/d584c96f-2058-47bb-a205-672cebe71309-kube-api-access-744fl\") pod \"nova-operator-controller-manager-c7c776c96-p85rd\" (UID: \"d584c96f-2058-47bb-a205-672cebe71309\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.910412 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp"] Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.939348 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.939863 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-744fl\" (UniqueName: \"kubernetes.io/projected/d584c96f-2058-47bb-a205-672cebe71309-kube-api-access-744fl\") pod \"nova-operator-controller-manager-c7c776c96-p85rd\" (UID: \"d584c96f-2058-47bb-a205-672cebe71309\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.942719 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgvfb\" (UniqueName: \"kubernetes.io/projected/892fd48f-a2bf-4c2e-8167-1259635511a2-kube-api-access-vgvfb\") pod \"neutron-operator-controller-manager-64d7b59854-xcmjw\" (UID: \"892fd48f-a2bf-4c2e-8167-1259635511a2\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.950468 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnlhx\" (UniqueName: \"kubernetes.io/projected/d5b3350c-20ba-4b71-9d17-837992ec8740-kube-api-access-hnlhx\") pod \"octavia-operator-controller-manager-76fcc6dc7c-tqr46\" (UID: \"d5b3350c-20ba-4b71-9d17-837992ec8740\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.957706 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-8fnkj" Sep 30 00:26:35 crc kubenswrapper[4809]: I0930 00:26:35.975355 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.035883 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.037017 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r4xq\" (UniqueName: \"kubernetes.io/projected/d81fb42b-3e73-46fd-a522-ed87475d4a89-kube-api-access-5r4xq\") pod \"ovn-operator-controller-manager-9976ff44c-h8xbq\" (UID: \"d81fb42b-3e73-46fd-a522-ed87475d4a89\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.037917 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdszn\" (UniqueName: \"kubernetes.io/projected/2acf945e-a5dd-457a-b4d6-4e487056b64a-kube-api-access-zdszn\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.038123 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 
00:26:36.038309 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwjgm\" (UniqueName: \"kubernetes.io/projected/8ef1bec4-7f6d-472d-b92d-495e3f87ae16-kube-api-access-fwjgm\") pod \"placement-operator-controller-manager-589c58c6c-7ncwh\" (UID: \"8ef1bec4-7f6d-472d-b92d-495e3f87ae16\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.040746 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.052729 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.061233 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.063027 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.068046 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-wqkpx" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.068229 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.085952 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.096684 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.102743 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.105917 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.114218 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-cxs8b" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.116117 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.126075 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.127745 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.135662 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-8rbpp" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139520 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r4xq\" (UniqueName: \"kubernetes.io/projected/d81fb42b-3e73-46fd-a522-ed87475d4a89-kube-api-access-5r4xq\") pod \"ovn-operator-controller-manager-9976ff44c-h8xbq\" (UID: \"d81fb42b-3e73-46fd-a522-ed87475d4a89\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139570 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdszn\" (UniqueName: \"kubernetes.io/projected/2acf945e-a5dd-457a-b4d6-4e487056b64a-kube-api-access-zdszn\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139600 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjnwz\" (UniqueName: \"kubernetes.io/projected/842e3f23-bd61-4237-8739-1ac1ccb2fd22-kube-api-access-xjnwz\") pod \"swift-operator-controller-manager-bc7dc7bd9-mqwrp\" (UID: \"842e3f23-bd61-4237-8739-1ac1ccb2fd22\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139622 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hslzv\" (UniqueName: \"kubernetes.io/projected/445fd9f7-790d-42ea-9e64-183158dc1211-kube-api-access-hslzv\") pod \"telemetry-operator-controller-manager-84b8546f9c-jzhxh\" (UID: \"445fd9f7-790d-42ea-9e64-183158dc1211\") " pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139668 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlwfx\" (UniqueName: \"kubernetes.io/projected/9728ab12-7cab-4897-9ba6-b913a15c2eed-kube-api-access-jlwfx\") pod \"test-operator-controller-manager-f66b554c6-qtpm9\" (UID: \"9728ab12-7cab-4897-9ba6-b913a15c2eed\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139701 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-cert\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139720 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:36 crc 
kubenswrapper[4809]: I0930 00:26:36.139773 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwjgm\" (UniqueName: \"kubernetes.io/projected/8ef1bec4-7f6d-472d-b92d-495e3f87ae16-kube-api-access-fwjgm\") pod \"placement-operator-controller-manager-589c58c6c-7ncwh\" (UID: \"8ef1bec4-7f6d-472d-b92d-495e3f87ae16\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.139798 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcngr\" (UniqueName: \"kubernetes.io/projected/bf650466-50bb-4a1f-8bac-0098c8127167-kube-api-access-fcngr\") pod \"watcher-operator-controller-manager-76669f99c-nshn9\" (UID: \"bf650466-50bb-4a1f-8bac-0098c8127167\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:26:36 crc kubenswrapper[4809]: E0930 00:26:36.140749 4809 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 00:26:36 crc kubenswrapper[4809]: E0930 00:26:36.140829 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert podName:2acf945e-a5dd-457a-b4d6-4e487056b64a nodeName:}" failed. No retries permitted until 2025-09-30 00:26:36.640792245 +0000 UTC m=+1047.677041653 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-jjdj8" (UID: "2acf945e-a5dd-457a-b4d6-4e487056b64a") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.147026 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b-cert\") pod \"infra-operator-controller-manager-7d857cc749-jf4kc\" (UID: \"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.149422 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.165786 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.166690 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwjgm\" (UniqueName: \"kubernetes.io/projected/8ef1bec4-7f6d-472d-b92d-495e3f87ae16-kube-api-access-fwjgm\") pod \"placement-operator-controller-manager-589c58c6c-7ncwh\" (UID: \"8ef1bec4-7f6d-472d-b92d-495e3f87ae16\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.175315 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r4xq\" (UniqueName: \"kubernetes.io/projected/d81fb42b-3e73-46fd-a522-ed87475d4a89-kube-api-access-5r4xq\") pod \"ovn-operator-controller-manager-9976ff44c-h8xbq\" (UID: \"d81fb42b-3e73-46fd-a522-ed87475d4a89\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.184776 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdszn\" (UniqueName: \"kubernetes.io/projected/2acf945e-a5dd-457a-b4d6-4e487056b64a-kube-api-access-zdszn\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.197657 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.215069 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.217153 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.218210 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.220279 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.224610 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-6qdwp" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.225321 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.241538 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjnwz\" (UniqueName: \"kubernetes.io/projected/842e3f23-bd61-4237-8739-1ac1ccb2fd22-kube-api-access-xjnwz\") pod \"swift-operator-controller-manager-bc7dc7bd9-mqwrp\" (UID: \"842e3f23-bd61-4237-8739-1ac1ccb2fd22\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.241589 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hslzv\" (UniqueName: \"kubernetes.io/projected/445fd9f7-790d-42ea-9e64-183158dc1211-kube-api-access-hslzv\") pod \"telemetry-operator-controller-manager-84b8546f9c-jzhxh\" (UID: \"445fd9f7-790d-42ea-9e64-183158dc1211\") " pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.241618 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlwfx\" (UniqueName: \"kubernetes.io/projected/9728ab12-7cab-4897-9ba6-b913a15c2eed-kube-api-access-jlwfx\") pod \"test-operator-controller-manager-f66b554c6-qtpm9\" (UID: \"9728ab12-7cab-4897-9ba6-b913a15c2eed\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.241711 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcngr\" (UniqueName: \"kubernetes.io/projected/bf650466-50bb-4a1f-8bac-0098c8127167-kube-api-access-fcngr\") pod \"watcher-operator-controller-manager-76669f99c-nshn9\" (UID: \"bf650466-50bb-4a1f-8bac-0098c8127167\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.267401 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.268669 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.271668 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-j29q2" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.275120 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.276186 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.280300 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcngr\" (UniqueName: \"kubernetes.io/projected/bf650466-50bb-4a1f-8bac-0098c8127167-kube-api-access-fcngr\") pod \"watcher-operator-controller-manager-76669f99c-nshn9\" (UID: \"bf650466-50bb-4a1f-8bac-0098c8127167\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.283920 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjnwz\" (UniqueName: \"kubernetes.io/projected/842e3f23-bd61-4237-8739-1ac1ccb2fd22-kube-api-access-xjnwz\") pod \"swift-operator-controller-manager-bc7dc7bd9-mqwrp\" (UID: \"842e3f23-bd61-4237-8739-1ac1ccb2fd22\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.284587 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlwfx\" (UniqueName: \"kubernetes.io/projected/9728ab12-7cab-4897-9ba6-b913a15c2eed-kube-api-access-jlwfx\") pod \"test-operator-controller-manager-f66b554c6-qtpm9\" (UID: \"9728ab12-7cab-4897-9ba6-b913a15c2eed\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.287166 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hslzv\" (UniqueName: \"kubernetes.io/projected/445fd9f7-790d-42ea-9e64-183158dc1211-kube-api-access-hslzv\") pod \"telemetry-operator-controller-manager-84b8546f9c-jzhxh\" (UID: \"445fd9f7-790d-42ea-9e64-183158dc1211\") " pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.321388 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.335634 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.343521 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sdl4\" (UniqueName: \"kubernetes.io/projected/b7e3889e-1f91-45ab-8431-1d406c1c4f7e-kube-api-access-8sdl4\") pod \"openstack-operator-controller-manager-f4db64f4c-x4j4m\" (UID: \"b7e3889e-1f91-45ab-8431-1d406c1c4f7e\") " pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.343613 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b7e3889e-1f91-45ab-8431-1d406c1c4f7e-cert\") pod \"openstack-operator-controller-manager-f4db64f4c-x4j4m\" (UID: \"b7e3889e-1f91-45ab-8431-1d406c1c4f7e\") " pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.363979 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.392975 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.445355 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tb5b\" (UniqueName: \"kubernetes.io/projected/0638d215-6c30-4683-894b-c91bdb99affb-kube-api-access-7tb5b\") pod \"rabbitmq-cluster-operator-manager-79d8469568-cmrm8\" (UID: \"0638d215-6c30-4683-894b-c91bdb99affb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.445456 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sdl4\" (UniqueName: \"kubernetes.io/projected/b7e3889e-1f91-45ab-8431-1d406c1c4f7e-kube-api-access-8sdl4\") pod \"openstack-operator-controller-manager-f4db64f4c-x4j4m\" (UID: \"b7e3889e-1f91-45ab-8431-1d406c1c4f7e\") " pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.445478 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b7e3889e-1f91-45ab-8431-1d406c1c4f7e-cert\") pod \"openstack-operator-controller-manager-f4db64f4c-x4j4m\" (UID: \"b7e3889e-1f91-45ab-8431-1d406c1c4f7e\") " pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.468985 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.471149 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.482180 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sdl4\" (UniqueName: \"kubernetes.io/projected/b7e3889e-1f91-45ab-8431-1d406c1c4f7e-kube-api-access-8sdl4\") pod \"openstack-operator-controller-manager-f4db64f4c-x4j4m\" (UID: \"b7e3889e-1f91-45ab-8431-1d406c1c4f7e\") " pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.487105 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b7e3889e-1f91-45ab-8431-1d406c1c4f7e-cert\") pod \"openstack-operator-controller-manager-f4db64f4c-x4j4m\" (UID: \"b7e3889e-1f91-45ab-8431-1d406c1c4f7e\") " pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.551325 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tb5b\" (UniqueName: \"kubernetes.io/projected/0638d215-6c30-4683-894b-c91bdb99affb-kube-api-access-7tb5b\") pod \"rabbitmq-cluster-operator-manager-79d8469568-cmrm8\" (UID: \"0638d215-6c30-4683-894b-c91bdb99affb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.580100 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tb5b\" (UniqueName: \"kubernetes.io/projected/0638d215-6c30-4683-894b-c91bdb99affb-kube-api-access-7tb5b\") pod \"rabbitmq-cluster-operator-manager-79d8469568-cmrm8\" (UID: \"0638d215-6c30-4683-894b-c91bdb99affb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.652772 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:36 crc kubenswrapper[4809]: E0930 00:26:36.652973 4809 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 00:26:36 crc kubenswrapper[4809]: E0930 00:26:36.653053 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert podName:2acf945e-a5dd-457a-b4d6-4e487056b64a nodeName:}" failed. No retries permitted until 2025-09-30 00:26:37.653033512 +0000 UTC m=+1048.689282920 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-jjdj8" (UID: "2acf945e-a5dd-457a-b4d6-4e487056b64a") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.675536 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.711096 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f"] Sep 30 00:26:36 crc kubenswrapper[4809]: I0930 00:26:36.879307 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.100868 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.129549 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.132364 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" event={"ID":"6306e69b-dd84-4e09-b462-71a7f858d351","Type":"ContainerStarted","Data":"c8fe5a7cf64b55b150740885b6a590ec76d3304bb03955b17ef268bff5d05d97"} Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.134192 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" event={"ID":"e5433da0-c4a6-46a5-b936-bf361aca6946","Type":"ContainerStarted","Data":"5985b7eea63be24985d2de229862bd21f4582d80a46bb4c211cd5cd235bef029"} Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.627383 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.640779 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.653571 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.679203 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.680792 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.699475 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2acf945e-a5dd-457a-b4d6-4e487056b64a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-jjdj8\" (UID: \"2acf945e-a5dd-457a-b4d6-4e487056b64a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.706179 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.706220 4809 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.706796 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc"] Sep 30 00:26:37 crc kubenswrapper[4809]: W0930 00:26:37.707755 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70a9a1c8_b1be_4c24_b2da_b0d75b5aaabb.slice/crio-6a4d13b6e13016e9832f26c0bc3746eb8cb1b4d1f5cadccd7c750aa28dd09979 WatchSource:0}: Error finding container 6a4d13b6e13016e9832f26c0bc3746eb8cb1b4d1f5cadccd7c750aa28dd09979: Status 404 returned error can't find the container with id 6a4d13b6e13016e9832f26c0bc3746eb8cb1b4d1f5cadccd7c750aa28dd09979 Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.718775 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.725971 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.731304 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.741434 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:37 crc kubenswrapper[4809]: W0930 00:26:37.759186 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a314eca_cb77_46bf_bebb_0a2b6910259d.slice/crio-306eaef69d5fa15718bb5d5c2a33a166dda8c951adbae11528902f2ce30200ae WatchSource:0}: Error finding container 306eaef69d5fa15718bb5d5c2a33a166dda8c951adbae11528902f2ce30200ae: Status 404 returned error can't find the container with id 306eaef69d5fa15718bb5d5c2a33a166dda8c951adbae11528902f2ce30200ae Sep 30 00:26:37 crc kubenswrapper[4809]: W0930 00:26:37.759672 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaaa0a31e_8d3a_4846_8ca9_c55a9b33cd8b.slice/crio-959ff9be02e022b0284e38227f718ed71cb5bdb501281f49f78612765f3cac26 WatchSource:0}: Error finding container 959ff9be02e022b0284e38227f718ed71cb5bdb501281f49f78612765f3cac26: Status 404 returned error can't find the container with id 959ff9be02e022b0284e38227f718ed71cb5bdb501281f49f78612765f3cac26 Sep 30 00:26:37 crc kubenswrapper[4809]: W0930 00:26:37.770367 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod892fd48f_a2bf_4c2e_8167_1259635511a2.slice/crio-76240a863f236a7c8b1d5d12477c7f6f56378f7f304622ff8fc41455c7db41e1 WatchSource:0}: Error finding container 76240a863f236a7c8b1d5d12477c7f6f56378f7f304622ff8fc41455c7db41e1: Status 404 returned error can't find the container with id 76240a863f236a7c8b1d5d12477c7f6f56378f7f304622ff8fc41455c7db41e1 Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.885933 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp"] Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.897179 
4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46"] Sep 30 00:26:37 crc kubenswrapper[4809]: W0930 00:26:37.908282 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5b3350c_20ba_4b71_9d17_837992ec8740.slice/crio-283db31f7ddd7eb00b9e9bdf2c927e5b4531d940c56a3ac8b8d0c37e0d361c6d WatchSource:0}: Error finding container 283db31f7ddd7eb00b9e9bdf2c927e5b4531d940c56a3ac8b8d0c37e0d361c6d: Status 404 returned error can't find the container with id 283db31f7ddd7eb00b9e9bdf2c927e5b4531d940c56a3ac8b8d0c37e0d361c6d Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.911040 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh"] Sep 30 00:26:37 crc kubenswrapper[4809]: W0930 00:26:37.912726 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod842e3f23_bd61_4237_8739_1ac1ccb2fd22.slice/crio-f9f0cfa28b2f1895bece7cb7b3a83ae3989728d2975648d25ae14676d66fd685 WatchSource:0}: Error finding container f9f0cfa28b2f1895bece7cb7b3a83ae3989728d2975648d25ae14676d66fd685: Status 404 returned error can't find the container with id f9f0cfa28b2f1895bece7cb7b3a83ae3989728d2975648d25ae14676d66fd685 Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.926903 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m"] Sep 30 00:26:37 crc kubenswrapper[4809]: E0930 00:26:37.931112 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hnlhx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-tqr46_openstack-operators(d5b3350c-20ba-4b71-9d17-837992ec8740): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 00:26:37 crc kubenswrapper[4809]: E0930 00:26:37.934968 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fwjgm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-7ncwh_openstack-operators(8ef1bec4-7f6d-472d-b92d-495e3f87ae16): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 00:26:37 crc kubenswrapper[4809]: E0930 00:26:37.935255 4809 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.36:5001/openstack-k8s-operators/telemetry-operator:fb90e2d5d5450ef9908e16413d7df8331d5162ce,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hslzv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-84b8546f9c-jzhxh_openstack-operators(445fd9f7-790d-42ea-9e64-183158dc1211): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.945521 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9"] Sep 30 00:26:37 crc kubenswrapper[4809]: E0930 00:26:37.964046 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: 
{{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7tb5b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-cmrm8_openstack-operators(0638d215-6c30-4683-894b-c91bdb99affb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 00:26:37 crc kubenswrapper[4809]: E0930 00:26:37.966254 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" podUID="0638d215-6c30-4683-894b-c91bdb99affb" Sep 30 00:26:37 crc kubenswrapper[4809]: E0930 00:26:37.975658 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jlwfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-qtpm9_openstack-operators(9728ab12-7cab-4897-9ba6-b913a15c2eed): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 00:26:37 crc kubenswrapper[4809]: I0930 00:26:37.987021 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh"] Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.007894 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fcngr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
watcher-operator-controller-manager-76669f99c-nshn9_openstack-operators(bf650466-50bb-4a1f-8bac-0098c8127167): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.011297 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8"] Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.036081 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9"] Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.136228 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" podUID="d5b3350c-20ba-4b71-9d17-837992ec8740" Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.159658 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" event={"ID":"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b","Type":"ContainerStarted","Data":"959ff9be02e022b0284e38227f718ed71cb5bdb501281f49f78612765f3cac26"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.166044 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" event={"ID":"b7e3889e-1f91-45ab-8431-1d406c1c4f7e","Type":"ContainerStarted","Data":"4920cbc5a0a448e05839ac4907563a028dabf4ea88c7e431ebf1951f61f81c55"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.166105 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" event={"ID":"b7e3889e-1f91-45ab-8431-1d406c1c4f7e","Type":"ContainerStarted","Data":"909f2f340de1ceb42876404d3a5bd4d20aa6fdb073c072258c6b8647973a8240"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.172916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" event={"ID":"9728ab12-7cab-4897-9ba6-b913a15c2eed","Type":"ContainerStarted","Data":"9aa3647a2b292018c32d6246cc1014ae17555e6b58ee490cb7bfbedec8417210"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.174270 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" event={"ID":"904e7639-6336-4012-a8f3-2b6e9c134a1f","Type":"ContainerStarted","Data":"c8809ff9418b6500fbfc35221a6deb3ce1924b237aae386a603376094b5b0e64"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.175960 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" event={"ID":"0123e31b-cbb3-4545-b679-c7b27eeaebba","Type":"ContainerStarted","Data":"2b7d223bd200148e584904bf0256a2c5fb5bde4c8d646959d638ef0d197d4f92"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.178390 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" event={"ID":"892fd48f-a2bf-4c2e-8167-1259635511a2","Type":"ContainerStarted","Data":"76240a863f236a7c8b1d5d12477c7f6f56378f7f304622ff8fc41455c7db41e1"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.180401 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" 
event={"ID":"d5b3350c-20ba-4b71-9d17-837992ec8740","Type":"ContainerStarted","Data":"dc36ea86a5ca2bbc54dc90e3266a85e733c223b3cab7b1b25330209cd0c7ba77"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.180425 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" event={"ID":"d5b3350c-20ba-4b71-9d17-837992ec8740","Type":"ContainerStarted","Data":"283db31f7ddd7eb00b9e9bdf2c927e5b4531d940c56a3ac8b8d0c37e0d361c6d"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.182860 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" event={"ID":"8817a592-d1b1-4695-b0fe-71b93371e5ea","Type":"ContainerStarted","Data":"f2d2e7c942de6d5073f0b920023d291ac0f729a623b80b0700a0d1003c72f1b7"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.190324 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" event={"ID":"bf650466-50bb-4a1f-8bac-0098c8127167","Type":"ContainerStarted","Data":"1a3cf5163c8b27b5a034d84a9a7a0908e15183a640ce4e33658b825b85d60b37"} Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.190802 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" podUID="d5b3350c-20ba-4b71-9d17-837992ec8740" Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.197555 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" event={"ID":"a44af2a0-4faa-4b9d-a426-f137586b0844","Type":"ContainerStarted","Data":"fce1497d80edbec12c5dea7237f8e73ca45140d88f2934294cf3711db0c93138"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.203014 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" event={"ID":"2a80b8c2-74ea-4a89-a241-026ffb8c2b1a","Type":"ContainerStarted","Data":"fb4ce61c7b717145b23251d1447a1c685c9ae45a4b62bb328cee58d88d873dcf"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.229284 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" event={"ID":"842e3f23-bd61-4237-8739-1ac1ccb2fd22","Type":"ContainerStarted","Data":"f9f0cfa28b2f1895bece7cb7b3a83ae3989728d2975648d25ae14676d66fd685"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.233616 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" event={"ID":"d584c96f-2058-47bb-a205-672cebe71309","Type":"ContainerStarted","Data":"5c6e42d43d2cdaf6887fe066593a5285eeabf757d8943353248c578871f063ba"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.235309 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" event={"ID":"d81fb42b-3e73-46fd-a522-ed87475d4a89","Type":"ContainerStarted","Data":"254ef72450ab32eba4330fe9043ace9341cbf2f603cc025198029dc01a020d8a"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.239212 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" event={"ID":"70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb","Type":"ContainerStarted","Data":"6a4d13b6e13016e9832f26c0bc3746eb8cb1b4d1f5cadccd7c750aa28dd09979"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.246077 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" event={"ID":"4a314eca-cb77-46bf-bebb-0a2b6910259d","Type":"ContainerStarted","Data":"306eaef69d5fa15718bb5d5c2a33a166dda8c951adbae11528902f2ce30200ae"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.250833 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" event={"ID":"445fd9f7-790d-42ea-9e64-183158dc1211","Type":"ContainerStarted","Data":"3ee8180a27fe7f19df7db67f895fbb2e3140fb6efd4cbfc42ae96ac58072584f"} Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.268997 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" podUID="445fd9f7-790d-42ea-9e64-183158dc1211" Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.279088 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" event={"ID":"0638d215-6c30-4683-894b-c91bdb99affb","Type":"ContainerStarted","Data":"7a3d403b193784eadf29272f660a3ef2669f89c16e330773e58e1c9929bb376b"} Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.289007 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" podUID="0638d215-6c30-4683-894b-c91bdb99affb" Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.301529 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" event={"ID":"8ef1bec4-7f6d-472d-b92d-495e3f87ae16","Type":"ContainerStarted","Data":"3cdb8c809887acc98a00ed7eccdda4c9901a49175136aeb65374c843cc0adabf"} Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.320250 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" podUID="8ef1bec4-7f6d-472d-b92d-495e3f87ae16" Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.320532 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" event={"ID":"11ecdd7b-edcc-44fd-9a10-ab2f58a40430","Type":"ContainerStarted","Data":"ab24889f1ca3b0b756b4e393b8b97b682be913b0984b5a57ac49dd2618bca418"} Sep 30 00:26:38 crc kubenswrapper[4809]: I0930 00:26:38.365484 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8"] Sep 30 00:26:38 crc kubenswrapper[4809]: W0930 00:26:38.377766 4809 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2acf945e_a5dd_457a_b4d6_4e487056b64a.slice/crio-91dcc73f447a96f44ee84c4f1a45b41a6030d87cad7aa1be97581187315706a4 WatchSource:0}: Error finding container 91dcc73f447a96f44ee84c4f1a45b41a6030d87cad7aa1be97581187315706a4: Status 404 returned error can't find the container with id 91dcc73f447a96f44ee84c4f1a45b41a6030d87cad7aa1be97581187315706a4 Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.394738 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" podUID="9728ab12-7cab-4897-9ba6-b913a15c2eed" Sep 30 00:26:38 crc kubenswrapper[4809]: E0930 00:26:38.446589 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" podUID="bf650466-50bb-4a1f-8bac-0098c8127167" Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.360577 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" event={"ID":"445fd9f7-790d-42ea-9e64-183158dc1211","Type":"ContainerStarted","Data":"71a6c4dbcf063455dbabb36d15e6950230cb2190a6b47c3b23f768deaf102335"} Sep 30 00:26:39 crc kubenswrapper[4809]: E0930 00:26:39.367022 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.36:5001/openstack-k8s-operators/telemetry-operator:fb90e2d5d5450ef9908e16413d7df8331d5162ce\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" podUID="445fd9f7-790d-42ea-9e64-183158dc1211" Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.395965 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" event={"ID":"b7e3889e-1f91-45ab-8431-1d406c1c4f7e","Type":"ContainerStarted","Data":"93fd625de4dc474750ad1fba4c6e124d8878103748354dcaf000f13f8fc50dad"} Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.396995 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.426904 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" event={"ID":"9728ab12-7cab-4897-9ba6-b913a15c2eed","Type":"ContainerStarted","Data":"b1dfbe34b606e4c087c6d7c69ee0f072cfeb43541aed721df10026476b66c619"} Sep 30 00:26:39 crc kubenswrapper[4809]: E0930 00:26:39.435350 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" podUID="9728ab12-7cab-4897-9ba6-b913a15c2eed" Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.437266 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" podStartSLOduration=4.437251786 podStartE2EDuration="4.437251786s" 
podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:26:39.435699364 +0000 UTC m=+1050.471948772" watchObservedRunningTime="2025-09-30 00:26:39.437251786 +0000 UTC m=+1050.473501184" Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.450927 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" event={"ID":"2acf945e-a5dd-457a-b4d6-4e487056b64a","Type":"ContainerStarted","Data":"91dcc73f447a96f44ee84c4f1a45b41a6030d87cad7aa1be97581187315706a4"} Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.454742 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" event={"ID":"8ef1bec4-7f6d-472d-b92d-495e3f87ae16","Type":"ContainerStarted","Data":"5112bd9a9cd831bb383f07901b44d17cc36014d2e4d2739cebc369e4fe1f2912"} Sep 30 00:26:39 crc kubenswrapper[4809]: E0930 00:26:39.468043 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" podUID="8ef1bec4-7f6d-472d-b92d-495e3f87ae16" Sep 30 00:26:39 crc kubenswrapper[4809]: I0930 00:26:39.472545 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" event={"ID":"bf650466-50bb-4a1f-8bac-0098c8127167","Type":"ContainerStarted","Data":"94c923d6b6730bc672d9ff2a9607c51e4b3566b3ba5af4612432a16b8a45f8e8"} Sep 30 00:26:39 crc kubenswrapper[4809]: E0930 00:26:39.482392 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" podUID="0638d215-6c30-4683-894b-c91bdb99affb" Sep 30 00:26:39 crc kubenswrapper[4809]: E0930 00:26:39.482470 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" podUID="bf650466-50bb-4a1f-8bac-0098c8127167" Sep 30 00:26:39 crc kubenswrapper[4809]: E0930 00:26:39.482539 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" podUID="d5b3350c-20ba-4b71-9d17-837992ec8740" Sep 30 00:26:40 crc kubenswrapper[4809]: E0930 00:26:40.481985 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.36:5001/openstack-k8s-operators/telemetry-operator:fb90e2d5d5450ef9908e16413d7df8331d5162ce\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" podUID="445fd9f7-790d-42ea-9e64-183158dc1211" Sep 30 00:26:40 crc kubenswrapper[4809]: E0930 00:26:40.482017 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" podUID="8ef1bec4-7f6d-472d-b92d-495e3f87ae16" Sep 30 00:26:40 crc kubenswrapper[4809]: E0930 00:26:40.482193 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" podUID="bf650466-50bb-4a1f-8bac-0098c8127167" Sep 30 00:26:40 crc kubenswrapper[4809]: E0930 00:26:40.482222 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" podUID="9728ab12-7cab-4897-9ba6-b913a15c2eed" Sep 30 00:26:46 crc kubenswrapper[4809]: I0930 00:26:46.682028 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-f4db64f4c-x4j4m" Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.772857 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" event={"ID":"842e3f23-bd61-4237-8739-1ac1ccb2fd22","Type":"ContainerStarted","Data":"d087d45a9cbc667c48a1c56217e8ab33fe382b8e3e4d81e6be1d16f6f2de8df7"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.775875 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" event={"ID":"d81fb42b-3e73-46fd-a522-ed87475d4a89","Type":"ContainerStarted","Data":"caff3e64bf17b110d25e4194278a0b526f2edbeeed698998123dfb24951f2eb7"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.799439 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" event={"ID":"8817a592-d1b1-4695-b0fe-71b93371e5ea","Type":"ContainerStarted","Data":"6fa88dd5853d42b9d03831c021a7cd71f77bcb199c7ad70051111885b6dc3057"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.836298 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" event={"ID":"e5433da0-c4a6-46a5-b936-bf361aca6946","Type":"ContainerStarted","Data":"5988caaa18e01164359b8ae26dd904d0aadde3dffe784d093d2d83296a599346"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.855146 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" 
event={"ID":"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b","Type":"ContainerStarted","Data":"60f7015adbb403c44b6146761686091a3957b5095f79cac4f0f27fe038f1677e"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.867800 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" event={"ID":"6306e69b-dd84-4e09-b462-71a7f858d351","Type":"ContainerStarted","Data":"6a24c99f8de887c95445e5560a22ee88f11b957b53d3fec95d4110ff55e7e33f"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.881923 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" event={"ID":"2acf945e-a5dd-457a-b4d6-4e487056b64a","Type":"ContainerStarted","Data":"0eb0c57c6e027fbca0b463712421852de168d735c44d66793592c095c6c6c937"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.895536 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" event={"ID":"4a314eca-cb77-46bf-bebb-0a2b6910259d","Type":"ContainerStarted","Data":"8ce7f7b7fdd67635f5f4ec9de47799e503846ca5e4f53b4a0b6ac8b9fa080243"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.903742 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" event={"ID":"2a80b8c2-74ea-4a89-a241-026ffb8c2b1a","Type":"ContainerStarted","Data":"f54b0c296f060e6f7a28a61eb938be2015bf4c2dd582a023c5496e689f5d780e"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.903806 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" event={"ID":"2a80b8c2-74ea-4a89-a241-026ffb8c2b1a","Type":"ContainerStarted","Data":"14e94b1496998c9b1626d2af113371d31ef4b9a4117b5d241ab9311e044f4989"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.903886 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.918464 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" event={"ID":"904e7639-6336-4012-a8f3-2b6e9c134a1f","Type":"ContainerStarted","Data":"04596757adc3902bc1711cdde06ce6504e405d47ca933f9ea12e1887797f4518"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.931466 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" event={"ID":"11ecdd7b-edcc-44fd-9a10-ab2f58a40430","Type":"ContainerStarted","Data":"f07f803e67a633bedae12770b3750eaa5d6312cd36798c55a0412329efaf29c1"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.932943 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" podStartSLOduration=3.963770082 podStartE2EDuration="14.932928274s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.127866154 +0000 UTC m=+1048.164115562" lastFinishedPulling="2025-09-30 00:26:48.097024346 +0000 UTC m=+1059.133273754" observedRunningTime="2025-09-30 00:26:49.927322063 +0000 UTC m=+1060.963571471" watchObservedRunningTime="2025-09-30 00:26:49.932928274 +0000 UTC m=+1060.969177682" Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.941776 4809 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" event={"ID":"70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb","Type":"ContainerStarted","Data":"e050f9c9e9be69e10cde17f1ec40f968263b2fd0eaa1c1de32c53186d205419f"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.954991 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" event={"ID":"a44af2a0-4faa-4b9d-a426-f137586b0844","Type":"ContainerStarted","Data":"9b2396de443239bda6b7d07c144edb45612b74a0739cd2956a835d0674da11cb"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.968677 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" event={"ID":"0123e31b-cbb3-4545-b679-c7b27eeaebba","Type":"ContainerStarted","Data":"4b7084cafa6bc45caca19e65a800a878218f17817fe24d57e6cee3cc41a8204e"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.978539 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" event={"ID":"d584c96f-2058-47bb-a205-672cebe71309","Type":"ContainerStarted","Data":"354217d1685fc85e714d197c190635afb92f6322092b8e38bab220bf38114ea1"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.987874 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" event={"ID":"892fd48f-a2bf-4c2e-8167-1259635511a2","Type":"ContainerStarted","Data":"8b6f604ce3887f78347cd973c3c8f7a172360bb18a8c78d780cd6f50065a72d9"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.987931 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" event={"ID":"892fd48f-a2bf-4c2e-8167-1259635511a2","Type":"ContainerStarted","Data":"a50106272d597dd8d9a810cdf7826d7edb5d35b4d8e25b76dcdc1a8b65001a64"} Sep 30 00:26:49 crc kubenswrapper[4809]: I0930 00:26:49.988765 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:50 crc kubenswrapper[4809]: I0930 00:26:50.014605 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" podStartSLOduration=4.636927487 podStartE2EDuration="15.01458823s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.774890006 +0000 UTC m=+1048.811139414" lastFinishedPulling="2025-09-30 00:26:48.152550729 +0000 UTC m=+1059.188800157" observedRunningTime="2025-09-30 00:26:50.008911347 +0000 UTC m=+1061.045160755" watchObservedRunningTime="2025-09-30 00:26:50.01458823 +0000 UTC m=+1061.050837628" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.000147 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" event={"ID":"4a314eca-cb77-46bf-bebb-0a2b6910259d","Type":"ContainerStarted","Data":"b62a08fddca61b25897a73d3c539178dd99923668c14ce9dc896f9bf6999d62c"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.000894 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.003347 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" event={"ID":"904e7639-6336-4012-a8f3-2b6e9c134a1f","Type":"ContainerStarted","Data":"de6b177929a23b1b45321ea6e063bc288e26436e965ea84c3c166ecd3e600454"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.003503 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.005425 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" event={"ID":"842e3f23-bd61-4237-8739-1ac1ccb2fd22","Type":"ContainerStarted","Data":"39d0b82cfe99053c8468b235ceec56b8b7a5cb71965d1964d92079d475eddf16"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.005562 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.007713 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" event={"ID":"8817a592-d1b1-4695-b0fe-71b93371e5ea","Type":"ContainerStarted","Data":"ac31ab71a7e3c81d193c7ef18c4ca9576c83b9c87bc7b692eaef36c80c20d007"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.007782 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.009980 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" event={"ID":"a44af2a0-4faa-4b9d-a426-f137586b0844","Type":"ContainerStarted","Data":"04493d6ded6fdd65fb81b4212cd8ede66b51a4c568c65725ba089fa5218e0f51"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.010694 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.011998 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" event={"ID":"6306e69b-dd84-4e09-b462-71a7f858d351","Type":"ContainerStarted","Data":"2671579a61fbc1dcfc37fdd6bb0cdc86f9154c5350c483965e0353be76d8e872"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.012573 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.014007 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" event={"ID":"d584c96f-2058-47bb-a205-672cebe71309","Type":"ContainerStarted","Data":"b8f1323e0aa0228663c18fa0666e85ec25606df169902ccb6de77cb0a48e8364"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.015335 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.020212 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" 
event={"ID":"2acf945e-a5dd-457a-b4d6-4e487056b64a","Type":"ContainerStarted","Data":"6ac38c934bf20f52411ae0f55ab4759a5342404d01d7500bef7390ce31ae26d0"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.021154 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.022394 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" event={"ID":"0123e31b-cbb3-4545-b679-c7b27eeaebba","Type":"ContainerStarted","Data":"822f0fe029d87b8de3fb04b93f930cf20a7442309c7756b7044c4d1b1fb04886"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.023445 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.025557 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" event={"ID":"11ecdd7b-edcc-44fd-9a10-ab2f58a40430","Type":"ContainerStarted","Data":"714713bba60815695cc4a7c12d7e629b63f7a9c66892b474ce6722b1f337a107"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.035736 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.046901 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" podStartSLOduration=5.652345356 podStartE2EDuration="16.046880014s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.763339384 +0000 UTC m=+1048.799588792" lastFinishedPulling="2025-09-30 00:26:48.157874042 +0000 UTC m=+1059.194123450" observedRunningTime="2025-09-30 00:26:51.046319659 +0000 UTC m=+1062.082569067" watchObservedRunningTime="2025-09-30 00:26:51.046880014 +0000 UTC m=+1062.083129422" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.047484 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" event={"ID":"70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb","Type":"ContainerStarted","Data":"f75ee7c83c80bf31e77b1bad7dec97efa16d003e5a98becb9054a59391aba13b"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.048910 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.051786 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" event={"ID":"e5433da0-c4a6-46a5-b936-bf361aca6946","Type":"ContainerStarted","Data":"5275bf94253742ee4ec68da7c1c0afd6b06c66b93a5b2bcc09f5c043f01892b4"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.052687 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.054490 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" 
event={"ID":"aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b","Type":"ContainerStarted","Data":"45fa76da66c72715505cef5a3e9d682a1f0db8f396600efe82cbd44383d384f1"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.055436 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.058700 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" event={"ID":"d81fb42b-3e73-46fd-a522-ed87475d4a89","Type":"ContainerStarted","Data":"ce3cd7c14a08f7e38071c22c57eedcb0a0787205b9433ef71b617ec8c7fab5dd"} Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.093313 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" podStartSLOduration=5.578296494 podStartE2EDuration="16.093287342s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.681402081 +0000 UTC m=+1048.717651519" lastFinishedPulling="2025-09-30 00:26:48.196392949 +0000 UTC m=+1059.232642367" observedRunningTime="2025-09-30 00:26:51.07128021 +0000 UTC m=+1062.107529628" watchObservedRunningTime="2025-09-30 00:26:51.093287342 +0000 UTC m=+1062.129536750" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.120419 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" podStartSLOduration=6.331726799 podStartE2EDuration="16.120396971s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:38.383767472 +0000 UTC m=+1049.420016880" lastFinishedPulling="2025-09-30 00:26:48.172437644 +0000 UTC m=+1059.208687052" observedRunningTime="2025-09-30 00:26:51.113875746 +0000 UTC m=+1062.150125174" watchObservedRunningTime="2025-09-30 00:26:51.120396971 +0000 UTC m=+1062.156646379" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.141947 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" podStartSLOduration=5.886620507 podStartE2EDuration="16.14192789s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.918350904 +0000 UTC m=+1048.954600312" lastFinishedPulling="2025-09-30 00:26:48.173658287 +0000 UTC m=+1059.209907695" observedRunningTime="2025-09-30 00:26:51.133020801 +0000 UTC m=+1062.169270209" watchObservedRunningTime="2025-09-30 00:26:51.14192789 +0000 UTC m=+1062.178177298" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.160720 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" podStartSLOduration=5.645992806 podStartE2EDuration="16.160699845s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.710408532 +0000 UTC m=+1048.746657940" lastFinishedPulling="2025-09-30 00:26:48.225115571 +0000 UTC m=+1059.261364979" observedRunningTime="2025-09-30 00:26:51.1564148 +0000 UTC m=+1062.192664228" watchObservedRunningTime="2025-09-30 00:26:51.160699845 +0000 UTC m=+1062.196949253" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.182449 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" podStartSLOduration=5.767302218 podStartE2EDuration="16.18243544s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.666715176 +0000 UTC m=+1048.702964594" lastFinishedPulling="2025-09-30 00:26:48.081848368 +0000 UTC m=+1059.118097816" observedRunningTime="2025-09-30 00:26:51.17761123 +0000 UTC m=+1062.213860638" watchObservedRunningTime="2025-09-30 00:26:51.18243544 +0000 UTC m=+1062.218684848" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.194249 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" podStartSLOduration=5.206851065 podStartE2EDuration="16.194231857s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.120227728 +0000 UTC m=+1048.156477136" lastFinishedPulling="2025-09-30 00:26:48.10760851 +0000 UTC m=+1059.143857928" observedRunningTime="2025-09-30 00:26:51.19027603 +0000 UTC m=+1062.226525428" watchObservedRunningTime="2025-09-30 00:26:51.194231857 +0000 UTC m=+1062.230481255" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.216846 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" podStartSLOduration=5.789480144 podStartE2EDuration="16.216829295s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.768940545 +0000 UTC m=+1048.805189953" lastFinishedPulling="2025-09-30 00:26:48.196289696 +0000 UTC m=+1059.232539104" observedRunningTime="2025-09-30 00:26:51.213771483 +0000 UTC m=+1062.250020891" watchObservedRunningTime="2025-09-30 00:26:51.216829295 +0000 UTC m=+1062.253078703" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.239785 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" podStartSLOduration=5.743837218 podStartE2EDuration="16.239770132s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.730601365 +0000 UTC m=+1048.766850773" lastFinishedPulling="2025-09-30 00:26:48.226534279 +0000 UTC m=+1059.262783687" observedRunningTime="2025-09-30 00:26:51.238927859 +0000 UTC m=+1062.275177267" watchObservedRunningTime="2025-09-30 00:26:51.239770132 +0000 UTC m=+1062.276019540" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.267301 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" podStartSLOduration=4.720478594 podStartE2EDuration="16.267283562s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:36.532496021 +0000 UTC m=+1047.568745429" lastFinishedPulling="2025-09-30 00:26:48.079300989 +0000 UTC m=+1059.115550397" observedRunningTime="2025-09-30 00:26:51.263010236 +0000 UTC m=+1062.299259644" watchObservedRunningTime="2025-09-30 00:26:51.267283562 +0000 UTC m=+1062.303532970" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.279933 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" podStartSLOduration=4.922008322 podStartE2EDuration="16.279913791s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:36.723710213 +0000 UTC 
m=+1047.759959621" lastFinishedPulling="2025-09-30 00:26:48.081615682 +0000 UTC m=+1059.117865090" observedRunningTime="2025-09-30 00:26:51.277228939 +0000 UTC m=+1062.313478347" watchObservedRunningTime="2025-09-30 00:26:51.279913791 +0000 UTC m=+1062.316163219" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.297544 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" podStartSLOduration=5.828578267 podStartE2EDuration="16.297525976s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.727109871 +0000 UTC m=+1048.763359269" lastFinishedPulling="2025-09-30 00:26:48.19605755 +0000 UTC m=+1059.232306978" observedRunningTime="2025-09-30 00:26:51.297262909 +0000 UTC m=+1062.333512337" watchObservedRunningTime="2025-09-30 00:26:51.297525976 +0000 UTC m=+1062.333775384" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.318595 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" podStartSLOduration=5.884099769 podStartE2EDuration="16.318575831s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.768545665 +0000 UTC m=+1048.804795073" lastFinishedPulling="2025-09-30 00:26:48.203021727 +0000 UTC m=+1059.239271135" observedRunningTime="2025-09-30 00:26:51.31479894 +0000 UTC m=+1062.351048348" watchObservedRunningTime="2025-09-30 00:26:51.318575831 +0000 UTC m=+1062.354825239" Sep 30 00:26:51 crc kubenswrapper[4809]: I0930 00:26:51.335710 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" podStartSLOduration=5.9340910529999995 podStartE2EDuration="16.335688741s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.680360323 +0000 UTC m=+1048.716609731" lastFinishedPulling="2025-09-30 00:26:48.081957981 +0000 UTC m=+1059.118207419" observedRunningTime="2025-09-30 00:26:51.329068914 +0000 UTC m=+1062.365318322" watchObservedRunningTime="2025-09-30 00:26:51.335688741 +0000 UTC m=+1062.371938149" Sep 30 00:26:52 crc kubenswrapper[4809]: I0930 00:26:52.070229 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:53 crc kubenswrapper[4809]: I0930 00:26:53.082358 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" event={"ID":"445fd9f7-790d-42ea-9e64-183158dc1211","Type":"ContainerStarted","Data":"122102fcd516d202dfd8698ba4b59110e1889ab4901e0fc577e4a5c2524fa104"} Sep 30 00:26:53 crc kubenswrapper[4809]: I0930 00:26:53.088291 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-h8xbq" Sep 30 00:26:53 crc kubenswrapper[4809]: I0930 00:26:53.088357 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-4tp9f" Sep 30 00:26:53 crc kubenswrapper[4809]: I0930 00:26:53.092355 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-jjdj8" Sep 30 00:26:53 crc kubenswrapper[4809]: I0930 00:26:53.101509 4809 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" podStartSLOduration=3.284742398 podStartE2EDuration="18.101496744s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.935103285 +0000 UTC m=+1048.971352683" lastFinishedPulling="2025-09-30 00:26:52.751857611 +0000 UTC m=+1063.788107029" observedRunningTime="2025-09-30 00:26:53.100262141 +0000 UTC m=+1064.136511549" watchObservedRunningTime="2025-09-30 00:26:53.101496744 +0000 UTC m=+1064.137746142" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.325216 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.325526 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.431851 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-2jhx6" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.602980 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-njdxf" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.632418 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-jbqng" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.639153 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-v7sp5" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.653255 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-s8j8c" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.723659 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-5fpgn" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.771006 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-6qsgb" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.815042 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9dbvw" Sep 30 00:26:55 crc kubenswrapper[4809]: I0930 00:26:55.979154 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-7t8jm" Sep 30 00:26:56 crc kubenswrapper[4809]: I0930 00:26:56.099324 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-xcmjw" Sep 30 00:26:56 crc kubenswrapper[4809]: I0930 00:26:56.152598 
4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-p85rd" Sep 30 00:26:56 crc kubenswrapper[4809]: I0930 00:26:56.224825 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-jf4kc" Sep 30 00:26:56 crc kubenswrapper[4809]: I0930 00:26:56.378309 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-mqwrp" Sep 30 00:26:56 crc kubenswrapper[4809]: I0930 00:26:56.394329 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.147214 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" event={"ID":"d5b3350c-20ba-4b71-9d17-837992ec8740","Type":"ContainerStarted","Data":"e45837cc8dc7024f4ee3d8d43d5129e74dd59cedfbb4a84fd754895dbe4a4998"} Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.147966 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.154171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" event={"ID":"9728ab12-7cab-4897-9ba6-b913a15c2eed","Type":"ContainerStarted","Data":"d45e97a6e1e14fc720eca4851d8a408155f717625e3e7aa3e4ee053afaf7e43f"} Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.154446 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.156946 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" event={"ID":"0638d215-6c30-4683-894b-c91bdb99affb","Type":"ContainerStarted","Data":"77528bad472143b27a2b961a6ffe2dc51b38985d0a613c75413d2c4acd5b31c5"} Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.161348 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" event={"ID":"8ef1bec4-7f6d-472d-b92d-495e3f87ae16","Type":"ContainerStarted","Data":"35fe0e973453b971d44dfd702fc6f31667bca25608620a5e6094c047974918a6"} Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.161602 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.164563 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" event={"ID":"bf650466-50bb-4a1f-8bac-0098c8127167","Type":"ContainerStarted","Data":"465758d836a7c110ab08c469c9a60b3a3857de8eaaabc50e45b3086ea7e9cf01"} Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.164797 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.176449 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" podStartSLOduration=3.838084189 podStartE2EDuration="23.176426336s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.930948982 +0000 UTC m=+1048.967198390" lastFinishedPulling="2025-09-30 00:26:57.269291119 +0000 UTC m=+1068.305540537" observedRunningTime="2025-09-30 00:26:58.170060825 +0000 UTC m=+1069.206310233" watchObservedRunningTime="2025-09-30 00:26:58.176426336 +0000 UTC m=+1069.212675754" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.185454 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cmrm8" podStartSLOduration=3.885945707 podStartE2EDuration="23.185430828s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.963894059 +0000 UTC m=+1049.000143467" lastFinishedPulling="2025-09-30 00:26:57.26337919 +0000 UTC m=+1068.299628588" observedRunningTime="2025-09-30 00:26:58.184577615 +0000 UTC m=+1069.220827033" watchObservedRunningTime="2025-09-30 00:26:58.185430828 +0000 UTC m=+1069.221680246" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.204066 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" podStartSLOduration=3.861998623 podStartE2EDuration="23.204045929s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.934843098 +0000 UTC m=+1048.971092506" lastFinishedPulling="2025-09-30 00:26:57.276890384 +0000 UTC m=+1068.313139812" observedRunningTime="2025-09-30 00:26:58.199839786 +0000 UTC m=+1069.236089224" watchObservedRunningTime="2025-09-30 00:26:58.204045929 +0000 UTC m=+1069.240295337" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.228406 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" podStartSLOduration=3.85857757 podStartE2EDuration="23.228384954s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:37.97548262 +0000 UTC m=+1049.011732028" lastFinishedPulling="2025-09-30 00:26:57.345290004 +0000 UTC m=+1068.381539412" observedRunningTime="2025-09-30 00:26:58.222357462 +0000 UTC m=+1069.258606880" watchObservedRunningTime="2025-09-30 00:26:58.228384954 +0000 UTC m=+1069.264634372" Sep 30 00:26:58 crc kubenswrapper[4809]: I0930 00:26:58.242046 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" podStartSLOduration=3.981620691 podStartE2EDuration="23.242026611s" podCreationTimestamp="2025-09-30 00:26:35 +0000 UTC" firstStartedPulling="2025-09-30 00:26:38.007747109 +0000 UTC m=+1049.043996507" lastFinishedPulling="2025-09-30 00:26:57.268153019 +0000 UTC m=+1068.304402427" observedRunningTime="2025-09-30 00:26:58.240612223 +0000 UTC m=+1069.276861641" watchObservedRunningTime="2025-09-30 00:26:58.242026611 +0000 UTC m=+1069.278276039" Sep 30 00:27:06 crc kubenswrapper[4809]: I0930 00:27:06.200829 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-tqr46" Sep 30 00:27:06 crc kubenswrapper[4809]: I0930 00:27:06.338957 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/placement-operator-controller-manager-589c58c6c-7ncwh" Sep 30 00:27:06 crc kubenswrapper[4809]: I0930 00:27:06.397756 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-84b8546f9c-jzhxh" Sep 30 00:27:06 crc kubenswrapper[4809]: I0930 00:27:06.473281 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-nshn9" Sep 30 00:27:06 crc kubenswrapper[4809]: I0930 00:27:06.473689 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-qtpm9" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.812025 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tj67d"] Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.814031 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.816952 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.817115 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-2fklx" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.817391 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.819579 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.844945 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tj67d"] Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.893617 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-klnh9"] Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.895456 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.899023 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.909750 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0ef4780-dbbb-4757-892b-0774377e6aaa-config\") pod \"dnsmasq-dns-675f4bcbfc-tj67d\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.909889 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2svq5\" (UniqueName: \"kubernetes.io/projected/c0ef4780-dbbb-4757-892b-0774377e6aaa-kube-api-access-2svq5\") pod \"dnsmasq-dns-675f4bcbfc-tj67d\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:22 crc kubenswrapper[4809]: I0930 00:27:22.910711 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-klnh9"] Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.011198 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pqms\" (UniqueName: \"kubernetes.io/projected/446f124f-36ad-46cb-a934-5e737dee0fef-kube-api-access-7pqms\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.011261 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.011301 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-config\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.011491 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0ef4780-dbbb-4757-892b-0774377e6aaa-config\") pod \"dnsmasq-dns-675f4bcbfc-tj67d\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.011782 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2svq5\" (UniqueName: \"kubernetes.io/projected/c0ef4780-dbbb-4757-892b-0774377e6aaa-kube-api-access-2svq5\") pod \"dnsmasq-dns-675f4bcbfc-tj67d\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.013360 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0ef4780-dbbb-4757-892b-0774377e6aaa-config\") pod \"dnsmasq-dns-675f4bcbfc-tj67d\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 
00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.031526 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2svq5\" (UniqueName: \"kubernetes.io/projected/c0ef4780-dbbb-4757-892b-0774377e6aaa-kube-api-access-2svq5\") pod \"dnsmasq-dns-675f4bcbfc-tj67d\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.113877 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pqms\" (UniqueName: \"kubernetes.io/projected/446f124f-36ad-46cb-a934-5e737dee0fef-kube-api-access-7pqms\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.113937 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.113969 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-config\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.114783 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.114826 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-config\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.137866 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pqms\" (UniqueName: \"kubernetes.io/projected/446f124f-36ad-46cb-a934-5e737dee0fef-kube-api-access-7pqms\") pod \"dnsmasq-dns-78dd6ddcc-klnh9\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.138247 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.212829 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.589757 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tj67d"] Sep 30 00:27:23 crc kubenswrapper[4809]: I0930 00:27:23.701297 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-klnh9"] Sep 30 00:27:24 crc kubenswrapper[4809]: I0930 00:27:24.385578 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" event={"ID":"c0ef4780-dbbb-4757-892b-0774377e6aaa","Type":"ContainerStarted","Data":"37a15454105824c1256a2b27254659a205cd0109430affbd5c177d0769754641"} Sep 30 00:27:24 crc kubenswrapper[4809]: I0930 00:27:24.387345 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" event={"ID":"446f124f-36ad-46cb-a934-5e737dee0fef","Type":"ContainerStarted","Data":"8b5b6b123d5f02958135801e3d3899e6d7abb3ea53ad804652e6ca81b339fb63"} Sep 30 00:27:25 crc kubenswrapper[4809]: I0930 00:27:25.324690 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:27:25 crc kubenswrapper[4809]: I0930 00:27:25.324757 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:27:25 crc kubenswrapper[4809]: I0930 00:27:25.998053 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tj67d"] Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.036247 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-85wfs"] Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.038114 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.045738 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-85wfs"] Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.067407 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-dns-svc\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.067535 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-config\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.067590 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6sl6\" (UniqueName: \"kubernetes.io/projected/4fb283ce-093d-4ee7-85dc-dd4765b07bed-kube-api-access-f6sl6\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.168679 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-config\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.168763 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6sl6\" (UniqueName: \"kubernetes.io/projected/4fb283ce-093d-4ee7-85dc-dd4765b07bed-kube-api-access-f6sl6\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.168800 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-dns-svc\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.169877 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-config\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.169980 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-dns-svc\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.203156 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6sl6\" (UniqueName: 
\"kubernetes.io/projected/4fb283ce-093d-4ee7-85dc-dd4765b07bed-kube-api-access-f6sl6\") pod \"dnsmasq-dns-666b6646f7-85wfs\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.280611 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-klnh9"] Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.304416 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bpqmf"] Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.306624 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.325148 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bpqmf"] Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.362374 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.475138 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.475200 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9znkc\" (UniqueName: \"kubernetes.io/projected/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-kube-api-access-9znkc\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.475303 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-config\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.577201 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-config\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.577593 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.577628 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9znkc\" (UniqueName: \"kubernetes.io/projected/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-kube-api-access-9znkc\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.578144 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-config\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.578578 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.633655 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9znkc\" (UniqueName: \"kubernetes.io/projected/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-kube-api-access-9znkc\") pod \"dnsmasq-dns-57d769cc4f-bpqmf\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:26 crc kubenswrapper[4809]: I0930 00:27:26.634010 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.142131 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.143438 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.145529 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.145869 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.146921 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-k8hm5" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.147040 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.147083 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.147210 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.147315 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.157930 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.290669 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d7616359-f18a-4fba-b35a-327e65a0c05d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.290738 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: 
\"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.290764 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d7616359-f18a-4fba-b35a-327e65a0c05d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.290803 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291352 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291384 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291405 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-config-data\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291423 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x25bj\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-kube-api-access-x25bj\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291451 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291592 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.291731 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc 
kubenswrapper[4809]: I0930 00:27:27.393150 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393233 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393282 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d7616359-f18a-4fba-b35a-327e65a0c05d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393307 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393321 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d7616359-f18a-4fba-b35a-327e65a0c05d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393348 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393384 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.393738 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.395387 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-config-data\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.395474 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x25bj\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-kube-api-access-x25bj\") pod \"rabbitmq-server-0\" (UID: 
\"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.395528 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.396079 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.406997 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d7616359-f18a-4fba-b35a-327e65a0c05d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.408522 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.409049 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-config-data\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.409113 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.409695 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.411208 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.419932 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d7616359-f18a-4fba-b35a-327e65a0c05d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.420945 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.422477 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x25bj\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-kube-api-access-x25bj\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.422795 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.445318 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.446861 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.449509 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.449536 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.449610 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.449847 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.450398 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.450609 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.450779 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zh24w" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.452875 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.454660 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.476536 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.598795 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6xh2\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-kube-api-access-d6xh2\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.598841 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.598869 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/095a7ca7-bda1-498c-8a6d-16de67eb0a70-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.598890 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.598940 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/095a7ca7-bda1-498c-8a6d-16de67eb0a70-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.599099 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.599198 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.599230 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.599268 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.599284 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.599471 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701239 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701325 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6xh2\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-kube-api-access-d6xh2\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701346 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701366 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/095a7ca7-bda1-498c-8a6d-16de67eb0a70-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701388 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701410 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/095a7ca7-bda1-498c-8a6d-16de67eb0a70-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701445 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: 
I0930 00:27:27.701475 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701504 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701523 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.701540 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.702070 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.702146 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.702907 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.703191 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.704294 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.704380 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.704948 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.706731 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.712149 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/095a7ca7-bda1-498c-8a6d-16de67eb0a70-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.712562 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/095a7ca7-bda1-498c-8a6d-16de67eb0a70-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.720401 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6xh2\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-kube-api-access-d6xh2\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.728305 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:27 crc kubenswrapper[4809]: I0930 00:27:27.821991 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.231815 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.234306 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.236845 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.241117 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.241247 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.241418 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.241481 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-nvng4" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.246423 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.261752 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.278692 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.280068 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.286134 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.286299 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.286922 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-l494m" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.287079 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.299226 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.345778 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.345866 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kw7r\" (UniqueName: \"kubernetes.io/projected/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-kube-api-access-6kw7r\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.345901 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.345921 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.345976 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.346004 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.346114 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.346137 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.346168 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447139 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447186 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447262 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") 
" pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447284 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447420 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447484 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-secrets\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447521 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447536 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447619 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447675 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c5df7664-528d-4076-a69f-bc59afb921a1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.447699 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448121 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448176 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448217 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbv29\" (UniqueName: \"kubernetes.io/projected/c5df7664-528d-4076-a69f-bc59afb921a1-kube-api-access-nbv29\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448241 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kw7r\" (UniqueName: \"kubernetes.io/projected/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-kube-api-access-6kw7r\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448260 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-config-data-default\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448287 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448305 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448340 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-kolla-config\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.448533 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.449396 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.449420 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.449600 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.453806 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.454062 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.454123 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.464896 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kw7r\" (UniqueName: \"kubernetes.io/projected/f9711a36-f5cf-4143-9ebe-13efdf29aa7a-kube-api-access-6kw7r\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.478634 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f9711a36-f5cf-4143-9ebe-13efdf29aa7a\") " pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550287 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-config-data-default\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550351 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-kolla-config\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550406 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: 
\"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550423 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550448 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550481 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-secrets\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550510 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c5df7664-528d-4076-a69f-bc59afb921a1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550526 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.550563 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbv29\" (UniqueName: \"kubernetes.io/projected/c5df7664-528d-4076-a69f-bc59afb921a1-kube-api-access-nbv29\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.551618 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-kolla-config\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.552219 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.552414 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c5df7664-528d-4076-a69f-bc59afb921a1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.553323 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.553589 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c5df7664-528d-4076-a69f-bc59afb921a1-config-data-default\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.554923 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.555187 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-secrets\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.555586 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5df7664-528d-4076-a69f-bc59afb921a1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.568613 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.569650 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbv29\" (UniqueName: \"kubernetes.io/projected/c5df7664-528d-4076-a69f-bc59afb921a1-kube-api-access-nbv29\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.578936 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"c5df7664-528d-4076-a69f-bc59afb921a1\") " pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.602063 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.860914 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.862283 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.864299 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-9qkvt" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.864865 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.864904 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.879231 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.957234 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st4nq\" (UniqueName: \"kubernetes.io/projected/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-kube-api-access-st4nq\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.957286 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-combined-ca-bundle\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.957341 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-memcached-tls-certs\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.957396 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-kolla-config\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:30 crc kubenswrapper[4809]: I0930 00:27:30.957434 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-config-data\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.060592 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-kolla-config\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.060704 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-config-data\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.060895 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st4nq\" (UniqueName: 
\"kubernetes.io/projected/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-kube-api-access-st4nq\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.060929 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-combined-ca-bundle\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.061021 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-memcached-tls-certs\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.061743 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-config-data\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.062013 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-kolla-config\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.072580 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-combined-ca-bundle\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.073079 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-memcached-tls-certs\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.096061 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st4nq\" (UniqueName: \"kubernetes.io/projected/279ac629-91a5-4dcc-9c7a-a8e64b4a1874-kube-api-access-st4nq\") pod \"memcached-0\" (UID: \"279ac629-91a5-4dcc-9c7a-a8e64b4a1874\") " pod="openstack/memcached-0" Sep 30 00:27:31 crc kubenswrapper[4809]: I0930 00:27:31.182380 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 00:27:32 crc kubenswrapper[4809]: I0930 00:27:32.893880 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:27:32 crc kubenswrapper[4809]: I0930 00:27:32.895924 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:27:32 crc kubenswrapper[4809]: I0930 00:27:32.902658 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:27:32 crc kubenswrapper[4809]: I0930 00:27:32.907725 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-j7j4q" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:32.998764 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhm4w\" (UniqueName: \"kubernetes.io/projected/f7a3ddad-430a-4a9f-8f4c-56ac6431193e-kube-api-access-nhm4w\") pod \"kube-state-metrics-0\" (UID: \"f7a3ddad-430a-4a9f-8f4c-56ac6431193e\") " pod="openstack/kube-state-metrics-0" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.100802 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhm4w\" (UniqueName: \"kubernetes.io/projected/f7a3ddad-430a-4a9f-8f4c-56ac6431193e-kube-api-access-nhm4w\") pod \"kube-state-metrics-0\" (UID: \"f7a3ddad-430a-4a9f-8f4c-56ac6431193e\") " pod="openstack/kube-state-metrics-0" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.150930 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhm4w\" (UniqueName: \"kubernetes.io/projected/f7a3ddad-430a-4a9f-8f4c-56ac6431193e-kube-api-access-nhm4w\") pod \"kube-state-metrics-0\" (UID: \"f7a3ddad-430a-4a9f-8f4c-56ac6431193e\") " pod="openstack/kube-state-metrics-0" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.228610 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.475778 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb"] Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.477299 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.481132 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards-sa-dockercfg-ls28k" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.481354 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.488806 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb"] Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.684961 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b25950e5-3b7e-49e9-bd6a-2e7c645bd468-serving-cert\") pod \"observability-ui-dashboards-6584dc9448-jw9fb\" (UID: \"b25950e5-3b7e-49e9-bd6a-2e7c645bd468\") " pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.686447 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szzjl\" (UniqueName: \"kubernetes.io/projected/b25950e5-3b7e-49e9-bd6a-2e7c645bd468-kube-api-access-szzjl\") pod \"observability-ui-dashboards-6584dc9448-jw9fb\" (UID: \"b25950e5-3b7e-49e9-bd6a-2e7c645bd468\") " pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.789499 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b25950e5-3b7e-49e9-bd6a-2e7c645bd468-serving-cert\") pod \"observability-ui-dashboards-6584dc9448-jw9fb\" (UID: \"b25950e5-3b7e-49e9-bd6a-2e7c645bd468\") " pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.789617 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szzjl\" (UniqueName: \"kubernetes.io/projected/b25950e5-3b7e-49e9-bd6a-2e7c645bd468-kube-api-access-szzjl\") pod \"observability-ui-dashboards-6584dc9448-jw9fb\" (UID: \"b25950e5-3b7e-49e9-bd6a-2e7c645bd468\") " pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.795394 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b25950e5-3b7e-49e9-bd6a-2e7c645bd468-serving-cert\") pod \"observability-ui-dashboards-6584dc9448-jw9fb\" (UID: \"b25950e5-3b7e-49e9-bd6a-2e7c645bd468\") " pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.819538 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szzjl\" (UniqueName: \"kubernetes.io/projected/b25950e5-3b7e-49e9-bd6a-2e7c645bd468-kube-api-access-szzjl\") pod \"observability-ui-dashboards-6584dc9448-jw9fb\" (UID: \"b25950e5-3b7e-49e9-bd6a-2e7c645bd468\") " pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.918764 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-8697566b4d-4mkts"] Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.919997 4809 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.935215 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-8697566b4d-4mkts"] Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.993108 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/11ead078-cefd-4792-9765-30027002fe00-console-serving-cert\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.993586 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-console-config\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.993695 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-service-ca\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.993827 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/11ead078-cefd-4792-9765-30027002fe00-console-oauth-config\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.993936 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-oauth-serving-cert\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.994029 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9bhl\" (UniqueName: \"kubernetes.io/projected/11ead078-cefd-4792-9765-30027002fe00-kube-api-access-p9bhl\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:33 crc kubenswrapper[4809]: I0930 00:27:33.994100 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-trusted-ca-bundle\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095045 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-console-config\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 
00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095109 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-service-ca\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095141 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/11ead078-cefd-4792-9765-30027002fe00-console-oauth-config\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095191 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-oauth-serving-cert\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9bhl\" (UniqueName: \"kubernetes.io/projected/11ead078-cefd-4792-9765-30027002fe00-kube-api-access-p9bhl\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095277 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-trusted-ca-bundle\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095314 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/11ead078-cefd-4792-9765-30027002fe00-console-serving-cert\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.095955 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-console-config\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.097000 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-oauth-serving-cert\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.097078 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-service-ca\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 
00:27:34.097442 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11ead078-cefd-4792-9765-30027002fe00-trusted-ca-bundle\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.098827 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.104754 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.108572 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.113717 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.113928 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.114174 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.115054 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-nv5s2" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.116782 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/11ead078-cefd-4792-9765-30027002fe00-console-serving-cert\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.117264 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/11ead078-cefd-4792-9765-30027002fe00-console-oauth-config\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.117319 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.119303 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.121445 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9bhl\" (UniqueName: \"kubernetes.io/projected/11ead078-cefd-4792-9765-30027002fe00-kube-api-access-p9bhl\") pod \"console-8697566b4d-4mkts\" (UID: \"11ead078-cefd-4792-9765-30027002fe00\") " pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.138404 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.238495 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298190 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5681f926-bb52-4288-b0cc-ca30e087457d-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298246 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5681f926-bb52-4288-b0cc-ca30e087457d-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298300 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298326 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298365 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298485 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298526 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5745h\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-kube-api-access-5745h\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.298621 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-config\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.399980 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400047 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400075 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5745h\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-kube-api-access-5745h\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400130 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-config\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400153 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5681f926-bb52-4288-b0cc-ca30e087457d-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400175 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5681f926-bb52-4288-b0cc-ca30e087457d-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400216 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.400240 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.401526 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5681f926-bb52-4288-b0cc-ca30e087457d-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.408151 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5681f926-bb52-4288-b0cc-ca30e087457d-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.408380 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.408403 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/05ba422c963d7c6b869a3db801182b4e4c7098cadb23639abf6cf1f8f773ff1a/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.408920 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.411867 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.415742 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.423626 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-config\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.430084 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5745h\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-kube-api-access-5745h\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.453925 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:34 crc kubenswrapper[4809]: I0930 00:27:34.488821 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.379106 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.384685 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.387206 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.387458 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.387590 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-m22nw" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.388727 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.389034 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.399118 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544173 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3c991006-090a-45ae-afec-388e497dd0ad-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544279 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544324 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544359 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544382 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgthh\" (UniqueName: \"kubernetes.io/projected/3c991006-090a-45ae-afec-388e497dd0ad-kube-api-access-hgthh\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544419 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544488 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c991006-090a-45ae-afec-388e497dd0ad-config\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.544527 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c991006-090a-45ae-afec-388e497dd0ad-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646136 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646206 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646229 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgthh\" (UniqueName: \"kubernetes.io/projected/3c991006-090a-45ae-afec-388e497dd0ad-kube-api-access-hgthh\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646261 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646324 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c991006-090a-45ae-afec-388e497dd0ad-config\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646359 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c991006-090a-45ae-afec-388e497dd0ad-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646381 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3c991006-090a-45ae-afec-388e497dd0ad-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 
00:27:36.646436 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.646806 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.651247 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3c991006-090a-45ae-afec-388e497dd0ad-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.652157 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c991006-090a-45ae-afec-388e497dd0ad-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.652402 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c991006-090a-45ae-afec-388e497dd0ad-config\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.654384 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.661927 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.662724 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c991006-090a-45ae-afec-388e497dd0ad-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.669807 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgthh\" (UniqueName: \"kubernetes.io/projected/3c991006-090a-45ae-afec-388e497dd0ad-kube-api-access-hgthh\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.695161 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3c991006-090a-45ae-afec-388e497dd0ad\") " pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc 
kubenswrapper[4809]: I0930 00:27:36.713782 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.835335 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-swbnb"] Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.838420 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.842947 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.846044 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-xg74n" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.846952 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.852557 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-wgkv7"] Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.854361 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.867028 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-swbnb"] Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.887778 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wgkv7"] Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953382 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c028cda5-76ab-45af-b4ab-72332d0471a0-scripts\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953461 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-log-ovn\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953509 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-etc-ovs\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953590 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-log\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953627 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40f19224-d223-4f7c-ad39-3afc9a8c18b1-scripts\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " 
pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953843 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-run\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953871 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-run-ovn\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.953908 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f19224-d223-4f7c-ad39-3afc9a8c18b1-ovn-controller-tls-certs\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.954095 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f19224-d223-4f7c-ad39-3afc9a8c18b1-combined-ca-bundle\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.954191 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78wff\" (UniqueName: \"kubernetes.io/projected/c028cda5-76ab-45af-b4ab-72332d0471a0-kube-api-access-78wff\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.954310 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-lib\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.954413 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-run\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:36 crc kubenswrapper[4809]: I0930 00:27:36.954441 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5thj\" (UniqueName: \"kubernetes.io/projected/40f19224-d223-4f7c-ad39-3afc9a8c18b1-kube-api-access-r5thj\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.055595 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-log\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " 
pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.055926 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40f19224-d223-4f7c-ad39-3afc9a8c18b1-scripts\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.055961 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-run\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.055990 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-run-ovn\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056036 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f19224-d223-4f7c-ad39-3afc9a8c18b1-ovn-controller-tls-certs\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056076 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f19224-d223-4f7c-ad39-3afc9a8c18b1-combined-ca-bundle\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056098 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78wff\" (UniqueName: \"kubernetes.io/projected/c028cda5-76ab-45af-b4ab-72332d0471a0-kube-api-access-78wff\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056137 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-lib\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056159 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-run\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056174 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5thj\" (UniqueName: \"kubernetes.io/projected/40f19224-d223-4f7c-ad39-3afc9a8c18b1-kube-api-access-r5thj\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056178 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-log\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-log\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056201 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c028cda5-76ab-45af-b4ab-72332d0471a0-scripts\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056307 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-log-ovn\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056373 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-etc-ovs\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056690 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-etc-ovs\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.056774 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-log-ovn\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.057406 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-run-ovn\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.057542 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-run\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.057743 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c028cda5-76ab-45af-b4ab-72332d0471a0-var-lib\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.058187 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/40f19224-d223-4f7c-ad39-3afc9a8c18b1-var-run\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.058242 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c028cda5-76ab-45af-b4ab-72332d0471a0-scripts\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.058839 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40f19224-d223-4f7c-ad39-3afc9a8c18b1-scripts\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.060302 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f19224-d223-4f7c-ad39-3afc9a8c18b1-combined-ca-bundle\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.060545 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f19224-d223-4f7c-ad39-3afc9a8c18b1-ovn-controller-tls-certs\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.081384 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78wff\" (UniqueName: \"kubernetes.io/projected/c028cda5-76ab-45af-b4ab-72332d0471a0-kube-api-access-78wff\") pod \"ovn-controller-ovs-wgkv7\" (UID: \"c028cda5-76ab-45af-b4ab-72332d0471a0\") " pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.081471 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5thj\" (UniqueName: \"kubernetes.io/projected/40f19224-d223-4f7c-ad39-3afc9a8c18b1-kube-api-access-r5thj\") pod \"ovn-controller-swbnb\" (UID: \"40f19224-d223-4f7c-ad39-3afc9a8c18b1\") " pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.163332 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-swbnb" Sep 30 00:27:37 crc kubenswrapper[4809]: I0930 00:27:37.187202 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:38 crc kubenswrapper[4809]: I0930 00:27:38.867376 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.090805 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.092380 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.095504 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.095573 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.095625 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-jqrm9" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.096027 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.105264 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.226531 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.226577 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw62p\" (UniqueName: \"kubernetes.io/projected/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-kube-api-access-dw62p\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.226605 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.226911 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.226938 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-config\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.226991 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.227020 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.227052 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328693 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328734 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-config\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328805 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328838 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328878 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328929 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.328956 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw62p\" (UniqueName: \"kubernetes.io/projected/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-kube-api-access-dw62p\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.329043 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.329377 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.330224 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.330717 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-config\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.330829 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.333941 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.333955 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.346626 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.349423 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw62p\" (UniqueName: \"kubernetes.io/projected/2cfa1ec3-7355-4a69-a63f-ee850e1e8e79-kube-api-access-dw62p\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.360557 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79\") " pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: I0930 00:27:40.411392 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 00:27:40 crc kubenswrapper[4809]: E0930 00:27:40.506354 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 00:27:40 crc kubenswrapper[4809]: E0930 00:27:40.506574 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2svq5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-tj67d_openstack(c0ef4780-dbbb-4757-892b-0774377e6aaa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:27:40 crc kubenswrapper[4809]: E0930 00:27:40.507788 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" podUID="c0ef4780-dbbb-4757-892b-0774377e6aaa" Sep 30 00:27:40 crc kubenswrapper[4809]: E0930 00:27:40.523330 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 00:27:40 crc kubenswrapper[4809]: E0930 00:27:40.523618 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d 
--hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7pqms,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-klnh9_openstack(446f124f-36ad-46cb-a934-5e737dee0fef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:27:40 crc kubenswrapper[4809]: E0930 00:27:40.525099 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" podUID="446f124f-36ad-46cb-a934-5e737dee0fef" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.231194 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.359824 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pqms\" (UniqueName: \"kubernetes.io/projected/446f124f-36ad-46cb-a934-5e737dee0fef-kube-api-access-7pqms\") pod \"446f124f-36ad-46cb-a934-5e737dee0fef\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.359870 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-config\") pod \"446f124f-36ad-46cb-a934-5e737dee0fef\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.359902 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-dns-svc\") pod \"446f124f-36ad-46cb-a934-5e737dee0fef\" (UID: \"446f124f-36ad-46cb-a934-5e737dee0fef\") " Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.361018 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "446f124f-36ad-46cb-a934-5e737dee0fef" (UID: "446f124f-36ad-46cb-a934-5e737dee0fef"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.361407 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-config" (OuterVolumeSpecName: "config") pod "446f124f-36ad-46cb-a934-5e737dee0fef" (UID: "446f124f-36ad-46cb-a934-5e737dee0fef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.369739 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/446f124f-36ad-46cb-a934-5e737dee0fef-kube-api-access-7pqms" (OuterVolumeSpecName: "kube-api-access-7pqms") pod "446f124f-36ad-46cb-a934-5e737dee0fef" (UID: "446f124f-36ad-46cb-a934-5e737dee0fef"). InnerVolumeSpecName "kube-api-access-7pqms". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.461994 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pqms\" (UniqueName: \"kubernetes.io/projected/446f124f-36ad-46cb-a934-5e737dee0fef-kube-api-access-7pqms\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.462027 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.462038 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/446f124f-36ad-46cb-a934-5e737dee0fef-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.561429 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.572489 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d7616359-f18a-4fba-b35a-327e65a0c05d","Type":"ContainerStarted","Data":"0d604000bc6cf32c7c0e38df887ee431cdaae3de4e04c750bd69da96f802c6f3"} Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.581571 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" event={"ID":"c0ef4780-dbbb-4757-892b-0774377e6aaa","Type":"ContainerDied","Data":"37a15454105824c1256a2b27254659a205cd0109430affbd5c177d0769754641"} Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.581685 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tj67d" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.584306 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" event={"ID":"446f124f-36ad-46cb-a934-5e737dee0fef","Type":"ContainerDied","Data":"8b5b6b123d5f02958135801e3d3899e6d7abb3ea53ad804652e6ca81b339fb63"} Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.584358 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-klnh9" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.644592 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-klnh9"] Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.652124 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-klnh9"] Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.666114 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2svq5\" (UniqueName: \"kubernetes.io/projected/c0ef4780-dbbb-4757-892b-0774377e6aaa-kube-api-access-2svq5\") pod \"c0ef4780-dbbb-4757-892b-0774377e6aaa\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.666165 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0ef4780-dbbb-4757-892b-0774377e6aaa-config\") pod \"c0ef4780-dbbb-4757-892b-0774377e6aaa\" (UID: \"c0ef4780-dbbb-4757-892b-0774377e6aaa\") " Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.666767 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0ef4780-dbbb-4757-892b-0774377e6aaa-config" (OuterVolumeSpecName: "config") pod "c0ef4780-dbbb-4757-892b-0774377e6aaa" (UID: "c0ef4780-dbbb-4757-892b-0774377e6aaa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.671861 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0ef4780-dbbb-4757-892b-0774377e6aaa-kube-api-access-2svq5" (OuterVolumeSpecName: "kube-api-access-2svq5") pod "c0ef4780-dbbb-4757-892b-0774377e6aaa" (UID: "c0ef4780-dbbb-4757-892b-0774377e6aaa"). InnerVolumeSpecName "kube-api-access-2svq5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.719903 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="446f124f-36ad-46cb-a934-5e737dee0fef" path="/var/lib/kubelet/pods/446f124f-36ad-46cb-a934-5e737dee0fef/volumes" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.768322 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2svq5\" (UniqueName: \"kubernetes.io/projected/c0ef4780-dbbb-4757-892b-0774377e6aaa-kube-api-access-2svq5\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.768360 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0ef4780-dbbb-4757-892b-0774377e6aaa-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.915346 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tj67d"] Sep 30 00:27:41 crc kubenswrapper[4809]: I0930 00:27:41.921840 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tj67d"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.281953 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-8697566b4d-4mkts"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.299838 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-swbnb"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.312607 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 00:27:42 crc kubenswrapper[4809]: W0930 00:27:42.315381 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5681f926_bb52_4288_b0cc_ca30e087457d.slice/crio-279b74d25ec6d6c76f35d971da22e2f80b1fe1e77e31dbe22f583408171c8511 WatchSource:0}: Error finding container 279b74d25ec6d6c76f35d971da22e2f80b1fe1e77e31dbe22f583408171c8511: Status 404 returned error can't find the container with id 279b74d25ec6d6c76f35d971da22e2f80b1fe1e77e31dbe22f583408171c8511 Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.321055 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:27:42 crc kubenswrapper[4809]: W0930 00:27:42.325994 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod279ac629_91a5_4dcc_9c7a_a8e64b4a1874.slice/crio-b182ab9600b614f09f5069a116e713610bb9c026c57cc6daed7751a5797233d2 WatchSource:0}: Error finding container b182ab9600b614f09f5069a116e713610bb9c026c57cc6daed7751a5797233d2: Status 404 returned error can't find the container with id b182ab9600b614f09f5069a116e713610bb9c026c57cc6daed7751a5797233d2 Sep 30 00:27:42 crc kubenswrapper[4809]: W0930 00:27:42.327013 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69cd5ddd_031a_4a2f_993c_0e6ed61cff30.slice/crio-be96a0c29e3a819d1296fcb1c64bde912bb736dd52d65755fe7922529cbff68a WatchSource:0}: Error finding container be96a0c29e3a819d1296fcb1c64bde912bb736dd52d65755fe7922529cbff68a: Status 404 returned error can't find the container with id be96a0c29e3a819d1296fcb1c64bde912bb736dd52d65755fe7922529cbff68a Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.328927 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.349749 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.374315 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.384201 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bpqmf"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.396628 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.410774 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-85wfs"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.414372 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.594850 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" event={"ID":"b25950e5-3b7e-49e9-bd6a-2e7c645bd468","Type":"ContainerStarted","Data":"dd909d05ea75794741e9474e692db53c9dbc21ea1d3edaf9a1188ce9714f8bab"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.596520 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-swbnb" event={"ID":"40f19224-d223-4f7c-ad39-3afc9a8c18b1","Type":"ContainerStarted","Data":"6421b34584b8a1ce266e3724332a94b6bcb2e5c845212b36a93044c7ccbc5fdb"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.602997 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" event={"ID":"4fb283ce-093d-4ee7-85dc-dd4765b07bed","Type":"ContainerStarted","Data":"bcdf0aea462d260e5df8b0ccfb7aeb268273efa5a999513bde7d78ff73f792e2"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.606730 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-8697566b4d-4mkts" event={"ID":"11ead078-cefd-4792-9765-30027002fe00","Type":"ContainerStarted","Data":"cec666e31912fe3605485d85722a93cbcb25b3f3a90614bd55971ba36d6fb700"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.607876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c5df7664-528d-4076-a69f-bc59afb921a1","Type":"ContainerStarted","Data":"b1e07567d36fcc18cd0ce8870987fd2f17efe399d5ef6a4070d7ac1215706f9b"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.608947 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"279ac629-91a5-4dcc-9c7a-a8e64b4a1874","Type":"ContainerStarted","Data":"b182ab9600b614f09f5069a116e713610bb9c026c57cc6daed7751a5797233d2"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.609713 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerStarted","Data":"279b74d25ec6d6c76f35d971da22e2f80b1fe1e77e31dbe22f583408171c8511"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.610496 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f9711a36-f5cf-4143-9ebe-13efdf29aa7a","Type":"ContainerStarted","Data":"19a1cab6c081573e4ad4530927836f24042895d4afb34442bea9ea75c2233ccb"} Sep 30 00:27:42 crc 
kubenswrapper[4809]: I0930 00:27:42.612416 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"095a7ca7-bda1-498c-8a6d-16de67eb0a70","Type":"ContainerStarted","Data":"1dce1b12e97c7a8f03bf135e48c0d43730447bb5f5046719c62ff83fd7f50975"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.612450 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wgkv7"] Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.613601 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f7a3ddad-430a-4a9f-8f4c-56ac6431193e","Type":"ContainerStarted","Data":"1191142c5ff18757a5884dffefc4270e022207a272cebd0e5d68c63db9ae6de3"} Sep 30 00:27:42 crc kubenswrapper[4809]: I0930 00:27:42.614562 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" event={"ID":"69cd5ddd-031a-4a2f-993c-0e6ed61cff30","Type":"ContainerStarted","Data":"be96a0c29e3a819d1296fcb1c64bde912bb736dd52d65755fe7922529cbff68a"} Sep 30 00:27:43 crc kubenswrapper[4809]: I0930 00:27:43.196004 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 00:27:43 crc kubenswrapper[4809]: I0930 00:27:43.417928 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 00:27:43 crc kubenswrapper[4809]: I0930 00:27:43.654851 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79","Type":"ContainerStarted","Data":"5be7fb5f4b9696ef0fbbc368c983339d4da9e18c73c119d700b4a32a973f183a"} Sep 30 00:27:43 crc kubenswrapper[4809]: I0930 00:27:43.656351 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3c991006-090a-45ae-afec-388e497dd0ad","Type":"ContainerStarted","Data":"1f80425a24849329de3c2c2324763f63f7abf507a6c1c3c701f9ec036d4874af"} Sep 30 00:27:43 crc kubenswrapper[4809]: I0930 00:27:43.659757 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wgkv7" event={"ID":"c028cda5-76ab-45af-b4ab-72332d0471a0","Type":"ContainerStarted","Data":"92fa31bd9313b2351e55d8c2cf01b92626f4dfd77ede12854ac24de2ed060549"} Sep 30 00:27:43 crc kubenswrapper[4809]: I0930 00:27:43.710960 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0ef4780-dbbb-4757-892b-0774377e6aaa" path="/var/lib/kubelet/pods/c0ef4780-dbbb-4757-892b-0774377e6aaa/volumes" Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.671977 4809 generic.go:334] "Generic (PLEG): container finished" podID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerID="44937d2ce766c94bcfdfb5f438244dee29404305246ff4def965881e41adc453" exitCode=0 Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.672300 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" event={"ID":"69cd5ddd-031a-4a2f-993c-0e6ed61cff30","Type":"ContainerDied","Data":"44937d2ce766c94bcfdfb5f438244dee29404305246ff4def965881e41adc453"} Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.674770 4809 generic.go:334] "Generic (PLEG): container finished" podID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerID="fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304" exitCode=0 Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.674860 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" 
event={"ID":"4fb283ce-093d-4ee7-85dc-dd4765b07bed","Type":"ContainerDied","Data":"fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304"} Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.677598 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"095a7ca7-bda1-498c-8a6d-16de67eb0a70","Type":"ContainerStarted","Data":"743f47fb70d0bb64c618e80220574e87e303504d35695209df55120b5be8211a"} Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.680171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-8697566b4d-4mkts" event={"ID":"11ead078-cefd-4792-9765-30027002fe00","Type":"ContainerStarted","Data":"908016bdac236ded739b2633a87da5a0a313e89e8d0c45ba4d69ff79961eb2b7"} Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.682082 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d7616359-f18a-4fba-b35a-327e65a0c05d","Type":"ContainerStarted","Data":"d069e4cc0bde9e03aa2c4113f1c7695e4fca7141970b029715da52511e304260"} Sep 30 00:27:44 crc kubenswrapper[4809]: I0930 00:27:44.751963 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-8697566b4d-4mkts" podStartSLOduration=11.751939784 podStartE2EDuration="11.751939784s" podCreationTimestamp="2025-09-30 00:27:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:27:44.744223156 +0000 UTC m=+1115.780472574" watchObservedRunningTime="2025-09-30 00:27:44.751939784 +0000 UTC m=+1115.788189192" Sep 30 00:27:54 crc kubenswrapper[4809]: I0930 00:27:54.238834 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:54 crc kubenswrapper[4809]: I0930 00:27:54.239406 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:54 crc kubenswrapper[4809]: I0930 00:27:54.244460 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:54 crc kubenswrapper[4809]: I0930 00:27:54.801770 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-8697566b4d-4mkts" Sep 30 00:27:54 crc kubenswrapper[4809]: I0930 00:27:54.857174 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-78fb9bb869-z7t6w"] Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.324579 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.324971 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.325028 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:27:55 crc 
kubenswrapper[4809]: I0930 00:27:55.325820 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"980f1cfcbca928da130d5259c71b79d3b5762bb3b4baa2fd8b3f457e575da03a"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.325911 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://980f1cfcbca928da130d5259c71b79d3b5762bb3b4baa2fd8b3f457e575da03a" gracePeriod=600 Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.816214 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="980f1cfcbca928da130d5259c71b79d3b5762bb3b4baa2fd8b3f457e575da03a" exitCode=0 Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.816256 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"980f1cfcbca928da130d5259c71b79d3b5762bb3b4baa2fd8b3f457e575da03a"} Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.816643 4809 scope.go:117] "RemoveContainer" containerID="1c9f0940c0710f8074f11d7eb6412ad3db197af8b05e861bb012eb4cb786e097" Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.845111 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" event={"ID":"4fb283ce-093d-4ee7-85dc-dd4765b07bed","Type":"ContainerStarted","Data":"a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9"} Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.845391 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.849421 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" event={"ID":"69cd5ddd-031a-4a2f-993c-0e6ed61cff30","Type":"ContainerStarted","Data":"39d680e2d9e3a8c17c894b4e77955094eadf6d8f8e64a2ac8747d44b97b08508"} Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.885803 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" podStartSLOduration=28.622885309 podStartE2EDuration="29.885783803s" podCreationTimestamp="2025-09-30 00:27:26 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.330905049 +0000 UTC m=+1113.367154457" lastFinishedPulling="2025-09-30 00:27:43.593803543 +0000 UTC m=+1114.630052951" observedRunningTime="2025-09-30 00:27:55.882911205 +0000 UTC m=+1126.919160613" watchObservedRunningTime="2025-09-30 00:27:55.885783803 +0000 UTC m=+1126.922033211" Sep 30 00:27:55 crc kubenswrapper[4809]: I0930 00:27:55.887491 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" podStartSLOduration=28.675993058 podStartE2EDuration="29.887484968s" podCreationTimestamp="2025-09-30 00:27:26 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.376895337 +0000 UTC m=+1113.413144745" lastFinishedPulling="2025-09-30 00:27:43.588387227 +0000 UTC m=+1114.624636655" observedRunningTime="2025-09-30 00:27:55.864144289 +0000 UTC m=+1126.900393697" 
watchObservedRunningTime="2025-09-30 00:27:55.887484968 +0000 UTC m=+1126.923734366" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.634868 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.865440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f9711a36-f5cf-4143-9ebe-13efdf29aa7a","Type":"ContainerStarted","Data":"88b904ac33e7ce98a80a0b7b33bbf53d1e0d376236427dea29e9d77b3d37086a"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.869737 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" event={"ID":"b25950e5-3b7e-49e9-bd6a-2e7c645bd468","Type":"ContainerStarted","Data":"889a7b76136b1b4db06b290509b3fbe9dc34d3c489c268df33314701f79d1f63"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.872320 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f7a3ddad-430a-4a9f-8f4c-56ac6431193e","Type":"ContainerStarted","Data":"011750b10155869cfe0f552c2ff1ebfca96a336a959b7e71ebd07d7ca05db0ee"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.872409 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.875116 4809 generic.go:334] "Generic (PLEG): container finished" podID="c028cda5-76ab-45af-b4ab-72332d0471a0" containerID="bb47fe351175135e9a665ee6c3e262d023909524cdeb9b169b6be43fa127e443" exitCode=0 Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.875174 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wgkv7" event={"ID":"c028cda5-76ab-45af-b4ab-72332d0471a0","Type":"ContainerDied","Data":"bb47fe351175135e9a665ee6c3e262d023909524cdeb9b169b6be43fa127e443"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.880162 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"ea47a4425c6b80c1d2b34f75ad9b62ec24ab7c72e09d6e8962f4a70eaa824489"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.888810 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-swbnb" event={"ID":"40f19224-d223-4f7c-ad39-3afc9a8c18b1","Type":"ContainerStarted","Data":"ebe6fd010b93614bee726823317184009b5693b9ed6fc2646587e0310f8bc3b2"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.890964 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-swbnb" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.898338 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3c991006-090a-45ae-afec-388e497dd0ad","Type":"ContainerStarted","Data":"91eb22d11cbe10cbf3e03291f8131e1f2ee2c6d6095033be8ca2578902fa1564"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.901453 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c5df7664-528d-4076-a69f-bc59afb921a1","Type":"ContainerStarted","Data":"373eb8cd06f8ac0e4c97933b614b34a30213f34a86cc96f34dbc756fb4c2db0d"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.903263 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"279ac629-91a5-4dcc-9c7a-a8e64b4a1874","Type":"ContainerStarted","Data":"330596345da48cfc3d1b989911085f0fba1d988a4d05220ac8c9db3ef8302d22"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.903386 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.905857 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79","Type":"ContainerStarted","Data":"5f9228ac85ba146faa9ce7e5319c6daaa61cbd8a86b357af161a141e9021adaf"} Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.955137 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-ui-dashboards-6584dc9448-jw9fb" podStartSLOduration=12.020945131 podStartE2EDuration="23.95509164s" podCreationTimestamp="2025-09-30 00:27:33 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.37363503 +0000 UTC m=+1113.409884438" lastFinishedPulling="2025-09-30 00:27:54.307781549 +0000 UTC m=+1125.344030947" observedRunningTime="2025-09-30 00:27:56.951311209 +0000 UTC m=+1127.987560637" watchObservedRunningTime="2025-09-30 00:27:56.95509164 +0000 UTC m=+1127.991341068" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.968894 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=11.837623061 podStartE2EDuration="24.968875162s" podCreationTimestamp="2025-09-30 00:27:32 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.372085228 +0000 UTC m=+1113.408334636" lastFinishedPulling="2025-09-30 00:27:55.503337329 +0000 UTC m=+1126.539586737" observedRunningTime="2025-09-30 00:27:56.967431383 +0000 UTC m=+1128.003680801" watchObservedRunningTime="2025-09-30 00:27:56.968875162 +0000 UTC m=+1128.005124570" Sep 30 00:27:56 crc kubenswrapper[4809]: I0930 00:27:56.990857 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.834478008 podStartE2EDuration="26.990839244s" podCreationTimestamp="2025-09-30 00:27:30 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.330439826 +0000 UTC m=+1113.366689234" lastFinishedPulling="2025-09-30 00:27:54.486801062 +0000 UTC m=+1125.523050470" observedRunningTime="2025-09-30 00:27:56.988963653 +0000 UTC m=+1128.025213061" watchObservedRunningTime="2025-09-30 00:27:56.990839244 +0000 UTC m=+1128.027088662" Sep 30 00:27:57 crc kubenswrapper[4809]: I0930 00:27:57.044404 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-swbnb" podStartSLOduration=8.413530137 podStartE2EDuration="21.044380936s" podCreationTimestamp="2025-09-30 00:27:36 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.351184745 +0000 UTC m=+1113.387434153" lastFinishedPulling="2025-09-30 00:27:54.982035544 +0000 UTC m=+1126.018284952" observedRunningTime="2025-09-30 00:27:57.012065366 +0000 UTC m=+1128.048314794" watchObservedRunningTime="2025-09-30 00:27:57.044380936 +0000 UTC m=+1128.080630344" Sep 30 00:27:57 crc kubenswrapper[4809]: I0930 00:27:57.917744 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wgkv7" event={"ID":"c028cda5-76ab-45af-b4ab-72332d0471a0","Type":"ContainerStarted","Data":"e146377222268e0e028e6a7509971d8e20c2d2f7d309de5a67326a840d6f6280"} Sep 30 00:27:57 crc kubenswrapper[4809]: I0930 00:27:57.918111 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-ovs-wgkv7" event={"ID":"c028cda5-76ab-45af-b4ab-72332d0471a0","Type":"ContainerStarted","Data":"7a6ed0fb5b31fac4d604f9d737484c01015af3dfa22188eccf9c5ee00416c37d"} Sep 30 00:27:57 crc kubenswrapper[4809]: I0930 00:27:57.941590 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-wgkv7" podStartSLOduration=10.776822316 podStartE2EDuration="21.941569197s" podCreationTimestamp="2025-09-30 00:27:36 +0000 UTC" firstStartedPulling="2025-09-30 00:27:43.517843946 +0000 UTC m=+1114.554093354" lastFinishedPulling="2025-09-30 00:27:54.682590837 +0000 UTC m=+1125.718840235" observedRunningTime="2025-09-30 00:27:57.937872528 +0000 UTC m=+1128.974121946" watchObservedRunningTime="2025-09-30 00:27:57.941569197 +0000 UTC m=+1128.977818605" Sep 30 00:27:58 crc kubenswrapper[4809]: I0930 00:27:58.925094 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:58 crc kubenswrapper[4809]: I0930 00:27:58.925422 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:27:59 crc kubenswrapper[4809]: I0930 00:27:59.933367 4809 generic.go:334] "Generic (PLEG): container finished" podID="c5df7664-528d-4076-a69f-bc59afb921a1" containerID="373eb8cd06f8ac0e4c97933b614b34a30213f34a86cc96f34dbc756fb4c2db0d" exitCode=0 Sep 30 00:27:59 crc kubenswrapper[4809]: I0930 00:27:59.933483 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c5df7664-528d-4076-a69f-bc59afb921a1","Type":"ContainerDied","Data":"373eb8cd06f8ac0e4c97933b614b34a30213f34a86cc96f34dbc756fb4c2db0d"} Sep 30 00:27:59 crc kubenswrapper[4809]: I0930 00:27:59.935918 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerStarted","Data":"6a708690ab6db92a5831a49baff934dfaea893e35040fc08dc056c48ebd3b150"} Sep 30 00:28:00 crc kubenswrapper[4809]: I0930 00:28:00.947690 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2cfa1ec3-7355-4a69-a63f-ee850e1e8e79","Type":"ContainerStarted","Data":"f14799ef66c8717e426b3f5f7dbbd9218184e3b18b5dd27dd1aca80a50db2426"} Sep 30 00:28:00 crc kubenswrapper[4809]: I0930 00:28:00.951858 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3c991006-090a-45ae-afec-388e497dd0ad","Type":"ContainerStarted","Data":"1aaf01becc758c8c32cd2a27ef993fa65fbc97525e8ff3f278c2254a5da341c1"} Sep 30 00:28:00 crc kubenswrapper[4809]: I0930 00:28:00.955470 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c5df7664-528d-4076-a69f-bc59afb921a1","Type":"ContainerStarted","Data":"3082460211bc4382ef775d5970bc7f59facd52827b84077700a8bb1ca13b0ade"} Sep 30 00:28:00 crc kubenswrapper[4809]: I0930 00:28:00.957225 4809 generic.go:334] "Generic (PLEG): container finished" podID="f9711a36-f5cf-4143-9ebe-13efdf29aa7a" containerID="88b904ac33e7ce98a80a0b7b33bbf53d1e0d376236427dea29e9d77b3d37086a" exitCode=0 Sep 30 00:28:00 crc kubenswrapper[4809]: I0930 00:28:00.957322 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f9711a36-f5cf-4143-9ebe-13efdf29aa7a","Type":"ContainerDied","Data":"88b904ac33e7ce98a80a0b7b33bbf53d1e0d376236427dea29e9d77b3d37086a"} Sep 30 00:28:00 crc kubenswrapper[4809]: 
I0930 00:28:00.976869 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.06250251 podStartE2EDuration="21.976849841s" podCreationTimestamp="2025-09-30 00:27:39 +0000 UTC" firstStartedPulling="2025-09-30 00:27:43.517724713 +0000 UTC m=+1114.553974131" lastFinishedPulling="2025-09-30 00:28:00.432072054 +0000 UTC m=+1131.468321462" observedRunningTime="2025-09-30 00:28:00.975502225 +0000 UTC m=+1132.011751633" watchObservedRunningTime="2025-09-30 00:28:00.976849841 +0000 UTC m=+1132.013099259" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.036643 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=19.698476997 podStartE2EDuration="32.036618871s" podCreationTimestamp="2025-09-30 00:27:29 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.343077366 +0000 UTC m=+1113.379326774" lastFinishedPulling="2025-09-30 00:27:54.68121924 +0000 UTC m=+1125.717468648" observedRunningTime="2025-09-30 00:28:01.03208165 +0000 UTC m=+1132.068331078" watchObservedRunningTime="2025-09-30 00:28:01.036618871 +0000 UTC m=+1132.072868299" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.057990 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.162815053 podStartE2EDuration="26.057972507s" podCreationTimestamp="2025-09-30 00:27:35 +0000 UTC" firstStartedPulling="2025-09-30 00:27:43.51759355 +0000 UTC m=+1114.553842978" lastFinishedPulling="2025-09-30 00:28:00.412751024 +0000 UTC m=+1131.449000432" observedRunningTime="2025-09-30 00:28:01.051250305 +0000 UTC m=+1132.087499723" watchObservedRunningTime="2025-09-30 00:28:01.057972507 +0000 UTC m=+1132.094221925" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.185833 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.363857 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.411772 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.466144 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.635621 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.679758 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-85wfs"] Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.714582 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.967455 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f9711a36-f5cf-4143-9ebe-13efdf29aa7a","Type":"ContainerStarted","Data":"72c75e1b445f942ac7876352072eee34b518fe4c2444b3ba54babbbcf2d51485"} Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.968078 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" 
containerName="dnsmasq-dns" containerID="cri-o://a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9" gracePeriod=10 Sep 30 00:28:01 crc kubenswrapper[4809]: I0930 00:28:01.968377 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.010340 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=20.352040287 podStartE2EDuration="33.010319984s" podCreationTimestamp="2025-09-30 00:27:29 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.32202975 +0000 UTC m=+1113.358279158" lastFinishedPulling="2025-09-30 00:27:54.980309447 +0000 UTC m=+1126.016558855" observedRunningTime="2025-09-30 00:28:02.002589586 +0000 UTC m=+1133.038839014" watchObservedRunningTime="2025-09-30 00:28:02.010319984 +0000 UTC m=+1133.046569392" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.021607 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.310408 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tqs44"] Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.312484 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.314457 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.404256 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-config\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.404319 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.404354 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.404402 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92dlb\" (UniqueName: \"kubernetes.io/projected/d9611e11-5c01-45b3-962f-ae5dbcf76c95-kube-api-access-92dlb\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.404922 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tqs44"] Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.492337 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-h8c75"] Sep 30 00:28:02 
crc kubenswrapper[4809]: I0930 00:28:02.493949 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.500002 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.507121 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-config\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.507455 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.507486 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.507558 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92dlb\" (UniqueName: \"kubernetes.io/projected/d9611e11-5c01-45b3-962f-ae5dbcf76c95-kube-api-access-92dlb\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.509390 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-config\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.509770 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.510404 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.522253 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-h8c75"] Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.551969 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92dlb\" (UniqueName: \"kubernetes.io/projected/d9611e11-5c01-45b3-962f-ae5dbcf76c95-kube-api-access-92dlb\") pod \"dnsmasq-dns-7f896c8c65-tqs44\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " 
pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.608921 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e469bd8b-615c-425a-89b0-2d37b4f738dd-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.609008 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j74rb\" (UniqueName: \"kubernetes.io/projected/e469bd8b-615c-425a-89b0-2d37b4f738dd-kube-api-access-j74rb\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.609056 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e469bd8b-615c-425a-89b0-2d37b4f738dd-ovs-rundir\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.609102 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e469bd8b-615c-425a-89b0-2d37b4f738dd-combined-ca-bundle\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.609145 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e469bd8b-615c-425a-89b0-2d37b4f738dd-config\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.609181 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e469bd8b-615c-425a-89b0-2d37b4f738dd-ovn-rundir\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.667326 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.711732 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e469bd8b-615c-425a-89b0-2d37b4f738dd-ovs-rundir\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.711890 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e469bd8b-615c-425a-89b0-2d37b4f738dd-combined-ca-bundle\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.711923 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e469bd8b-615c-425a-89b0-2d37b4f738dd-config\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.712009 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e469bd8b-615c-425a-89b0-2d37b4f738dd-ovn-rundir\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.712106 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e469bd8b-615c-425a-89b0-2d37b4f738dd-ovn-rundir\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.712112 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e469bd8b-615c-425a-89b0-2d37b4f738dd-ovs-rundir\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.712386 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e469bd8b-615c-425a-89b0-2d37b4f738dd-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.712462 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j74rb\" (UniqueName: \"kubernetes.io/projected/e469bd8b-615c-425a-89b0-2d37b4f738dd-kube-api-access-j74rb\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.712795 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e469bd8b-615c-425a-89b0-2d37b4f738dd-config\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc 
kubenswrapper[4809]: I0930 00:28:02.727793 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e469bd8b-615c-425a-89b0-2d37b4f738dd-combined-ca-bundle\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.730092 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e469bd8b-615c-425a-89b0-2d37b4f738dd-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.730882 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.750269 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j74rb\" (UniqueName: \"kubernetes.io/projected/e469bd8b-615c-425a-89b0-2d37b4f738dd-kube-api-access-j74rb\") pod \"ovn-controller-metrics-h8c75\" (UID: \"e469bd8b-615c-425a-89b0-2d37b4f738dd\") " pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.813346 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-config\") pod \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.813764 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6sl6\" (UniqueName: \"kubernetes.io/projected/4fb283ce-093d-4ee7-85dc-dd4765b07bed-kube-api-access-f6sl6\") pod \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.813961 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-dns-svc\") pod \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\" (UID: \"4fb283ce-093d-4ee7-85dc-dd4765b07bed\") " Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.827016 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fb283ce-093d-4ee7-85dc-dd4765b07bed-kube-api-access-f6sl6" (OuterVolumeSpecName: "kube-api-access-f6sl6") pod "4fb283ce-093d-4ee7-85dc-dd4765b07bed" (UID: "4fb283ce-093d-4ee7-85dc-dd4765b07bed"). InnerVolumeSpecName "kube-api-access-f6sl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.844247 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-h8c75" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.902405 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-config" (OuterVolumeSpecName: "config") pod "4fb283ce-093d-4ee7-85dc-dd4765b07bed" (UID: "4fb283ce-093d-4ee7-85dc-dd4765b07bed"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.920311 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.920342 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6sl6\" (UniqueName: \"kubernetes.io/projected/4fb283ce-093d-4ee7-85dc-dd4765b07bed-kube-api-access-f6sl6\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:02 crc kubenswrapper[4809]: I0930 00:28:02.921048 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4fb283ce-093d-4ee7-85dc-dd4765b07bed" (UID: "4fb283ce-093d-4ee7-85dc-dd4765b07bed"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.005964 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tqs44"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.010317 4809 generic.go:334] "Generic (PLEG): container finished" podID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerID="a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9" exitCode=0 Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.010596 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" event={"ID":"4fb283ce-093d-4ee7-85dc-dd4765b07bed","Type":"ContainerDied","Data":"a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9"} Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.010627 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" event={"ID":"4fb283ce-093d-4ee7-85dc-dd4765b07bed","Type":"ContainerDied","Data":"bcdf0aea462d260e5df8b0ccfb7aeb268273efa5a999513bde7d78ff73f792e2"} Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.010667 4809 scope.go:117] "RemoveContainer" containerID="a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.010772 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-85wfs" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.024697 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb283ce-093d-4ee7-85dc-dd4765b07bed-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.058162 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9kcbz"] Sep 30 00:28:03 crc kubenswrapper[4809]: E0930 00:28:03.060259 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerName="dnsmasq-dns" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.060283 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerName="dnsmasq-dns" Sep 30 00:28:03 crc kubenswrapper[4809]: E0930 00:28:03.060299 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerName="init" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.060305 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerName="init" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.060518 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" containerName="dnsmasq-dns" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.062453 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.068231 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.079029 4809 scope.go:117] "RemoveContainer" containerID="fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.081337 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9kcbz"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.150982 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-85wfs"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.175852 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-85wfs"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.184960 4809 scope.go:117] "RemoveContainer" containerID="a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9" Sep 30 00:28:03 crc kubenswrapper[4809]: E0930 00:28:03.186481 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9\": container with ID starting with a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9 not found: ID does not exist" containerID="a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.186522 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9"} err="failed to get container status \"a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9\": rpc error: code = NotFound desc = could not find container 
\"a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9\": container with ID starting with a54e58963a328cd80541a0cbf3f018ddd5ecf2e34795aa948c6298be681628c9 not found: ID does not exist" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.186556 4809 scope.go:117] "RemoveContainer" containerID="fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304" Sep 30 00:28:03 crc kubenswrapper[4809]: E0930 00:28:03.188826 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304\": container with ID starting with fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304 not found: ID does not exist" containerID="fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.188867 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304"} err="failed to get container status \"fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304\": rpc error: code = NotFound desc = could not find container \"fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304\": container with ID starting with fd380805c59d8901f9266ea31250814bc5c9fc4e01ac00896719ab7673771304 not found: ID does not exist" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.230225 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb82x\" (UniqueName: \"kubernetes.io/projected/fdf6876b-decd-4f69-ac2b-0725d2f47370-kube-api-access-xb82x\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.231689 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.231759 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.231874 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.231911 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-config\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.244612 4809 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.334587 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.334675 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-config\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.334744 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb82x\" (UniqueName: \"kubernetes.io/projected/fdf6876b-decd-4f69-ac2b-0725d2f47370-kube-api-access-xb82x\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.334864 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.334928 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.336236 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.337716 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-config\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.338534 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.350348 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 
00:28:03.382903 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb82x\" (UniqueName: \"kubernetes.io/projected/fdf6876b-decd-4f69-ac2b-0725d2f47370-kube-api-access-xb82x\") pod \"dnsmasq-dns-86db49b7ff-9kcbz\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.402735 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9kcbz"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.403436 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.503946 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tqs44"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.528968 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-gj5gk"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.530489 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.539789 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-gj5gk"] Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.638932 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.640754 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-config\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.640810 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-dns-svc\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.640830 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.640860 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-498v7\" (UniqueName: \"kubernetes.io/projected/25170433-57de-43a1-8459-8c2e31c42ffb-kube-api-access-498v7\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.705617 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4fb283ce-093d-4ee7-85dc-dd4765b07bed" path="/var/lib/kubelet/pods/4fb283ce-093d-4ee7-85dc-dd4765b07bed/volumes" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.714486 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.742197 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-dns-svc\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.742240 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.742272 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-498v7\" (UniqueName: \"kubernetes.io/projected/25170433-57de-43a1-8459-8c2e31c42ffb-kube-api-access-498v7\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.742431 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.742511 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-config\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.743370 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-dns-svc\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.743415 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-config\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.744037 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.744071 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.777518 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-498v7\" (UniqueName: \"kubernetes.io/projected/25170433-57de-43a1-8459-8c2e31c42ffb-kube-api-access-498v7\") pod \"dnsmasq-dns-698758b865-gj5gk\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.807787 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 00:28:03 crc kubenswrapper[4809]: I0930 00:28:03.900808 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-h8c75"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.025912 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.039826 4809 generic.go:334] "Generic (PLEG): container finished" podID="d9611e11-5c01-45b3-962f-ae5dbcf76c95" containerID="ef394cdce14460d4f8dd51a1f129d011d19c8bcb7f04d3f8a9fd4151d3903558" exitCode=0 Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.039882 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" event={"ID":"d9611e11-5c01-45b3-962f-ae5dbcf76c95","Type":"ContainerDied","Data":"ef394cdce14460d4f8dd51a1f129d011d19c8bcb7f04d3f8a9fd4151d3903558"} Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.039907 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" event={"ID":"d9611e11-5c01-45b3-962f-ae5dbcf76c95","Type":"ContainerStarted","Data":"055424135aacd0e224cd3f1c12a3a9baeb6c91747030627877cecb5f043740ca"} Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.050538 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-h8c75" event={"ID":"e469bd8b-615c-425a-89b0-2d37b4f738dd","Type":"ContainerStarted","Data":"ba9e17df665bdf2ee7c1006db02276671e7da73a7682b35b6eaaa8227fab1190"} Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.115902 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.117496 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9kcbz"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.327027 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.337604 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.340756 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.344917 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.347760 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.348463 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.352058 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-bdk2h" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.459442 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-scripts\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.459828 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-config\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.459851 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.459874 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.459901 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.459933 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.460019 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfzqk\" (UniqueName: \"kubernetes.io/projected/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-kube-api-access-pfzqk\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: 
I0930 00:28:04.463438 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.480494 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.480671 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.485731 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-85hjf" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.485914 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.486043 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.486161 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.563752 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfzqk\" (UniqueName: \"kubernetes.io/projected/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-kube-api-access-pfzqk\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.563855 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-scripts\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.563905 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-config\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.563926 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.563953 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.563982 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.564021 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.566992 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-config\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.567942 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-scripts\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.568245 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.579163 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.580355 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.581054 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.626518 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfzqk\" (UniqueName: \"kubernetes.io/projected/a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5-kube-api-access-pfzqk\") pod \"ovn-northd-0\" (UID: \"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5\") " pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.665386 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-lock\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.665489 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.665563 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-cache\") pod \"swift-storage-0\" (UID: 
\"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.665582 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.665605 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs99q\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-kube-api-access-rs99q\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.686295 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.772944 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-cache\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.773248 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.773389 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs99q\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-kube-api-access-rs99q\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.773531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-lock\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.773775 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: E0930 00:28:04.774077 4809 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 00:28:04 crc kubenswrapper[4809]: E0930 00:28:04.774193 4809 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 00:28:04 crc kubenswrapper[4809]: E0930 00:28:04.774338 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift podName:8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f nodeName:}" failed. No retries permitted until 2025-09-30 00:28:05.274317639 +0000 UTC m=+1136.310567047 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift") pod "swift-storage-0" (UID: "8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f") : configmap "swift-ring-files" not found Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.775465 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-cache\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.776010 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.780072 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-lock\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.801837 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-gj5gk"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.814912 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.835334 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs99q\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-kube-api-access-rs99q\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.964704 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-rxwsm"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.966086 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.967352 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-rxwsm"] Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.970498 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.974321 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 00:28:04 crc kubenswrapper[4809]: I0930 00:28:04.974849 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.024253 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.078509 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfxkc\" (UniqueName: \"kubernetes.io/projected/be3e3acf-e816-4c7d-a70e-1f51262676be-kube-api-access-hfxkc\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.078783 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-combined-ca-bundle\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.078863 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-dispersionconf\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.078883 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-ring-data-devices\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.078979 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/be3e3acf-e816-4c7d-a70e-1f51262676be-etc-swift\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.078998 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-scripts\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.079020 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-swiftconf\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.089921 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-h8c75" event={"ID":"e469bd8b-615c-425a-89b0-2d37b4f738dd","Type":"ContainerStarted","Data":"495bcf2f3c4b99ac9f2ebde548717f241406aa450ce0f39f4bc9ff515afe7a91"} Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.097924 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-gj5gk" event={"ID":"25170433-57de-43a1-8459-8c2e31c42ffb","Type":"ContainerStarted","Data":"460850622b0a24aa127c0e2c161dc03842db225f3e04c1ac63bb58dfe892ef75"} Sep 30 00:28:05 crc kubenswrapper[4809]: 
I0930 00:28:05.099893 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" event={"ID":"d9611e11-5c01-45b3-962f-ae5dbcf76c95","Type":"ContainerDied","Data":"055424135aacd0e224cd3f1c12a3a9baeb6c91747030627877cecb5f043740ca"} Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.099930 4809 scope.go:117] "RemoveContainer" containerID="ef394cdce14460d4f8dd51a1f129d011d19c8bcb7f04d3f8a9fd4151d3903558" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.100041 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-tqs44" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.109098 4809 generic.go:334] "Generic (PLEG): container finished" podID="fdf6876b-decd-4f69-ac2b-0725d2f47370" containerID="be602dc1a063a35b8200940bbac878dcb2d11c0b792530d87a6240347f095ac6" exitCode=0 Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.110455 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" event={"ID":"fdf6876b-decd-4f69-ac2b-0725d2f47370","Type":"ContainerDied","Data":"be602dc1a063a35b8200940bbac878dcb2d11c0b792530d87a6240347f095ac6"} Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.110613 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" event={"ID":"fdf6876b-decd-4f69-ac2b-0725d2f47370","Type":"ContainerStarted","Data":"b10b3c9a4ae062561b0a22e32cb18c74e55a46c88ec23f106221df26df1999b0"} Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.121343 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-h8c75" podStartSLOduration=3.121315388 podStartE2EDuration="3.121315388s" podCreationTimestamp="2025-09-30 00:28:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:05.105509682 +0000 UTC m=+1136.141759090" watchObservedRunningTime="2025-09-30 00:28:05.121315388 +0000 UTC m=+1136.157564796" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.197798 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-ovsdbserver-sb\") pod \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.197837 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92dlb\" (UniqueName: \"kubernetes.io/projected/d9611e11-5c01-45b3-962f-ae5dbcf76c95-kube-api-access-92dlb\") pod \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.197909 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-config\") pod \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198096 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-dns-svc\") pod \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\" (UID: \"d9611e11-5c01-45b3-962f-ae5dbcf76c95\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 
00:28:05.198308 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfxkc\" (UniqueName: \"kubernetes.io/projected/be3e3acf-e816-4c7d-a70e-1f51262676be-kube-api-access-hfxkc\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198335 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-combined-ca-bundle\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198393 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-dispersionconf\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198413 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-ring-data-devices\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198493 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/be3e3acf-e816-4c7d-a70e-1f51262676be-etc-swift\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198513 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-scripts\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.198531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-swiftconf\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.207397 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-swiftconf\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.208279 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/be3e3acf-e816-4c7d-a70e-1f51262676be-etc-swift\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.208727 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-scripts\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.217107 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-dispersionconf\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.219446 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-combined-ca-bundle\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.219964 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9611e11-5c01-45b3-962f-ae5dbcf76c95-kube-api-access-92dlb" (OuterVolumeSpecName: "kube-api-access-92dlb") pod "d9611e11-5c01-45b3-962f-ae5dbcf76c95" (UID: "d9611e11-5c01-45b3-962f-ae5dbcf76c95"). InnerVolumeSpecName "kube-api-access-92dlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.222746 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-ring-data-devices\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.234184 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfxkc\" (UniqueName: \"kubernetes.io/projected/be3e3acf-e816-4c7d-a70e-1f51262676be-kube-api-access-hfxkc\") pod \"swift-ring-rebalance-rxwsm\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.256784 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d9611e11-5c01-45b3-962f-ae5dbcf76c95" (UID: "d9611e11-5c01-45b3-962f-ae5dbcf76c95"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.270993 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d9611e11-5c01-45b3-962f-ae5dbcf76c95" (UID: "d9611e11-5c01-45b3-962f-ae5dbcf76c95"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.271029 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-config" (OuterVolumeSpecName: "config") pod "d9611e11-5c01-45b3-962f-ae5dbcf76c95" (UID: "d9611e11-5c01-45b3-962f-ae5dbcf76c95"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.300219 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.300301 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.300314 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92dlb\" (UniqueName: \"kubernetes.io/projected/d9611e11-5c01-45b3-962f-ae5dbcf76c95-kube-api-access-92dlb\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.300325 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.300334 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9611e11-5c01-45b3-962f-ae5dbcf76c95-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: E0930 00:28:05.300474 4809 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 00:28:05 crc kubenswrapper[4809]: E0930 00:28:05.300491 4809 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 00:28:05 crc kubenswrapper[4809]: E0930 00:28:05.300540 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift podName:8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f nodeName:}" failed. No retries permitted until 2025-09-30 00:28:06.300521996 +0000 UTC m=+1137.336771404 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift") pod "swift-storage-0" (UID: "8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f") : configmap "swift-ring-files" not found Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.355360 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.422541 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.488211 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tqs44"] Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.501059 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-tqs44"] Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.515488 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.611872 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-config\") pod \"fdf6876b-decd-4f69-ac2b-0725d2f47370\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.612127 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-nb\") pod \"fdf6876b-decd-4f69-ac2b-0725d2f47370\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.612356 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb82x\" (UniqueName: \"kubernetes.io/projected/fdf6876b-decd-4f69-ac2b-0725d2f47370-kube-api-access-xb82x\") pod \"fdf6876b-decd-4f69-ac2b-0725d2f47370\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.612437 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-dns-svc\") pod \"fdf6876b-decd-4f69-ac2b-0725d2f47370\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.612518 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-sb\") pod \"fdf6876b-decd-4f69-ac2b-0725d2f47370\" (UID: \"fdf6876b-decd-4f69-ac2b-0725d2f47370\") " Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.623435 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdf6876b-decd-4f69-ac2b-0725d2f47370-kube-api-access-xb82x" (OuterVolumeSpecName: "kube-api-access-xb82x") pod "fdf6876b-decd-4f69-ac2b-0725d2f47370" (UID: "fdf6876b-decd-4f69-ac2b-0725d2f47370"). InnerVolumeSpecName "kube-api-access-xb82x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.644474 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fdf6876b-decd-4f69-ac2b-0725d2f47370" (UID: "fdf6876b-decd-4f69-ac2b-0725d2f47370"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.644936 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fdf6876b-decd-4f69-ac2b-0725d2f47370" (UID: "fdf6876b-decd-4f69-ac2b-0725d2f47370"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.658087 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fdf6876b-decd-4f69-ac2b-0725d2f47370" (UID: "fdf6876b-decd-4f69-ac2b-0725d2f47370"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.662870 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-config" (OuterVolumeSpecName: "config") pod "fdf6876b-decd-4f69-ac2b-0725d2f47370" (UID: "fdf6876b-decd-4f69-ac2b-0725d2f47370"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.704590 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9611e11-5c01-45b3-962f-ae5dbcf76c95" path="/var/lib/kubelet/pods/d9611e11-5c01-45b3-962f-ae5dbcf76c95/volumes" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.714692 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.714975 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb82x\" (UniqueName: \"kubernetes.io/projected/fdf6876b-decd-4f69-ac2b-0725d2f47370-kube-api-access-xb82x\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.714988 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.714996 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.715005 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf6876b-decd-4f69-ac2b-0725d2f47370-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:05 crc kubenswrapper[4809]: I0930 00:28:05.976753 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-rxwsm"] Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.118988 4809 generic.go:334] "Generic (PLEG): container finished" podID="25170433-57de-43a1-8459-8c2e31c42ffb" containerID="336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6" exitCode=0 Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.119039 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-gj5gk" event={"ID":"25170433-57de-43a1-8459-8c2e31c42ffb","Type":"ContainerDied","Data":"336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6"} Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.120821 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxwsm" event={"ID":"be3e3acf-e816-4c7d-a70e-1f51262676be","Type":"ContainerStarted","Data":"9dde0914023c02a9f125eff8ca95786c5f3eea8a6708d6aa5baa06d57cbdf094"} Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.124173 4809 generic.go:334] "Generic (PLEG): container finished" podID="5681f926-bb52-4288-b0cc-ca30e087457d" containerID="6a708690ab6db92a5831a49baff934dfaea893e35040fc08dc056c48ebd3b150" exitCode=0 Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.124293 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerDied","Data":"6a708690ab6db92a5831a49baff934dfaea893e35040fc08dc056c48ebd3b150"} Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.129773 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" event={"ID":"fdf6876b-decd-4f69-ac2b-0725d2f47370","Type":"ContainerDied","Data":"b10b3c9a4ae062561b0a22e32cb18c74e55a46c88ec23f106221df26df1999b0"} Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.129847 4809 scope.go:117] "RemoveContainer" containerID="be602dc1a063a35b8200940bbac878dcb2d11c0b792530d87a6240347f095ac6" Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.129959 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9kcbz" Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.134556 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5","Type":"ContainerStarted","Data":"fec785ed4753816d4f6d33d243bbdb272255a749f26f6b843e4160e0a0ca3a36"} Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.243279 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9kcbz"] Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.257249 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9kcbz"] Sep 30 00:28:06 crc kubenswrapper[4809]: I0930 00:28:06.325615 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:06 crc kubenswrapper[4809]: E0930 00:28:06.325828 4809 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 00:28:06 crc kubenswrapper[4809]: E0930 00:28:06.326203 4809 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 00:28:06 crc kubenswrapper[4809]: E0930 00:28:06.326268 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift podName:8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f nodeName:}" failed. No retries permitted until 2025-09-30 00:28:08.32624498 +0000 UTC m=+1139.362494388 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift") pod "swift-storage-0" (UID: "8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f") : configmap "swift-ring-files" not found Sep 30 00:28:07 crc kubenswrapper[4809]: I0930 00:28:07.146985 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-gj5gk" event={"ID":"25170433-57de-43a1-8459-8c2e31c42ffb","Type":"ContainerStarted","Data":"a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb"} Sep 30 00:28:07 crc kubenswrapper[4809]: I0930 00:28:07.147143 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:07 crc kubenswrapper[4809]: I0930 00:28:07.172007 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-gj5gk" podStartSLOduration=4.171985305 podStartE2EDuration="4.171985305s" podCreationTimestamp="2025-09-30 00:28:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:07.168496061 +0000 UTC m=+1138.204745479" watchObservedRunningTime="2025-09-30 00:28:07.171985305 +0000 UTC m=+1138.208234723" Sep 30 00:28:07 crc kubenswrapper[4809]: I0930 00:28:07.702448 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdf6876b-decd-4f69-ac2b-0725d2f47370" path="/var/lib/kubelet/pods/fdf6876b-decd-4f69-ac2b-0725d2f47370/volumes" Sep 30 00:28:08 crc kubenswrapper[4809]: I0930 00:28:08.161393 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5","Type":"ContainerStarted","Data":"69481fcd669dc0c6e7f166c0ebc326ad3c7c8ee437b344ec3cf66a7f6f52e984"} Sep 30 00:28:08 crc kubenswrapper[4809]: I0930 00:28:08.161785 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5","Type":"ContainerStarted","Data":"d4f28c536f5690b34ffe0d60e7dabc1889400cc9a7466625bde97e19c03bee1f"} Sep 30 00:28:08 crc kubenswrapper[4809]: I0930 00:28:08.193156 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.540664866 podStartE2EDuration="4.193138736s" podCreationTimestamp="2025-09-30 00:28:04 +0000 UTC" firstStartedPulling="2025-09-30 00:28:05.435010389 +0000 UTC m=+1136.471259797" lastFinishedPulling="2025-09-30 00:28:07.087484259 +0000 UTC m=+1138.123733667" observedRunningTime="2025-09-30 00:28:08.182272464 +0000 UTC m=+1139.218521872" watchObservedRunningTime="2025-09-30 00:28:08.193138736 +0000 UTC m=+1139.229388144" Sep 30 00:28:08 crc kubenswrapper[4809]: I0930 00:28:08.361172 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:08 crc kubenswrapper[4809]: E0930 00:28:08.361414 4809 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 00:28:08 crc kubenswrapper[4809]: E0930 00:28:08.361457 4809 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 00:28:08 crc kubenswrapper[4809]: 
E0930 00:28:08.361518 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift podName:8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f nodeName:}" failed. No retries permitted until 2025-09-30 00:28:12.361499332 +0000 UTC m=+1143.397748740 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift") pod "swift-storage-0" (UID: "8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f") : configmap "swift-ring-files" not found Sep 30 00:28:09 crc kubenswrapper[4809]: I0930 00:28:09.170069 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 30 00:28:10 crc kubenswrapper[4809]: I0930 00:28:10.569087 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 00:28:10 crc kubenswrapper[4809]: I0930 00:28:10.569381 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 00:28:10 crc kubenswrapper[4809]: I0930 00:28:10.603320 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 00:28:10 crc kubenswrapper[4809]: I0930 00:28:10.603374 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 00:28:10 crc kubenswrapper[4809]: I0930 00:28:10.643539 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 00:28:10 crc kubenswrapper[4809]: I0930 00:28:10.687908 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 30 00:28:11 crc kubenswrapper[4809]: I0930 00:28:11.200356 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxwsm" event={"ID":"be3e3acf-e816-4c7d-a70e-1f51262676be","Type":"ContainerStarted","Data":"f442b944b8ff7707051eb2d5b49a9cb503e97113e587015485afcfc36bb380c5"} Sep 30 00:28:11 crc kubenswrapper[4809]: I0930 00:28:11.229471 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-rxwsm" podStartSLOduration=2.503143116 podStartE2EDuration="7.229451578s" podCreationTimestamp="2025-09-30 00:28:04 +0000 UTC" firstStartedPulling="2025-09-30 00:28:05.975600993 +0000 UTC m=+1137.011850401" lastFinishedPulling="2025-09-30 00:28:10.701909455 +0000 UTC m=+1141.738158863" observedRunningTime="2025-09-30 00:28:11.217076984 +0000 UTC m=+1142.253326382" watchObservedRunningTime="2025-09-30 00:28:11.229451578 +0000 UTC m=+1142.265700986" Sep 30 00:28:11 crc kubenswrapper[4809]: I0930 00:28:11.269384 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 00:28:11 crc kubenswrapper[4809]: I0930 00:28:11.292917 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 00:28:12 crc kubenswrapper[4809]: I0930 00:28:12.434876 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:12 crc kubenswrapper[4809]: E0930 00:28:12.435044 4809 projected.go:288] Couldn't get configMap 
openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 00:28:12 crc kubenswrapper[4809]: E0930 00:28:12.435254 4809 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 00:28:12 crc kubenswrapper[4809]: E0930 00:28:12.435301 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift podName:8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f nodeName:}" failed. No retries permitted until 2025-09-30 00:28:20.435286514 +0000 UTC m=+1151.471535922 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift") pod "swift-storage-0" (UID: "8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f") : configmap "swift-ring-files" not found Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.091915 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-w7ngf"] Sep 30 00:28:13 crc kubenswrapper[4809]: E0930 00:28:13.092285 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9611e11-5c01-45b3-962f-ae5dbcf76c95" containerName="init" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.092298 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9611e11-5c01-45b3-962f-ae5dbcf76c95" containerName="init" Sep 30 00:28:13 crc kubenswrapper[4809]: E0930 00:28:13.092322 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdf6876b-decd-4f69-ac2b-0725d2f47370" containerName="init" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.092329 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdf6876b-decd-4f69-ac2b-0725d2f47370" containerName="init" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.092506 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9611e11-5c01-45b3-962f-ae5dbcf76c95" containerName="init" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.092519 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdf6876b-decd-4f69-ac2b-0725d2f47370" containerName="init" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.093125 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.116965 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-w7ngf"] Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.250358 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl2dg\" (UniqueName: \"kubernetes.io/projected/0e0dcad9-a81c-4505-9d6c-63d3524defbd-kube-api-access-xl2dg\") pod \"mysqld-exporter-openstack-db-create-w7ngf\" (UID: \"0e0dcad9-a81c-4505-9d6c-63d3524defbd\") " pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.352019 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl2dg\" (UniqueName: \"kubernetes.io/projected/0e0dcad9-a81c-4505-9d6c-63d3524defbd-kube-api-access-xl2dg\") pod \"mysqld-exporter-openstack-db-create-w7ngf\" (UID: \"0e0dcad9-a81c-4505-9d6c-63d3524defbd\") " pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.373460 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl2dg\" (UniqueName: \"kubernetes.io/projected/0e0dcad9-a81c-4505-9d6c-63d3524defbd-kube-api-access-xl2dg\") pod \"mysqld-exporter-openstack-db-create-w7ngf\" (UID: \"0e0dcad9-a81c-4505-9d6c-63d3524defbd\") " pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.415502 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:13 crc kubenswrapper[4809]: I0930 00:28:13.898237 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-w7ngf"] Sep 30 00:28:13 crc kubenswrapper[4809]: W0930 00:28:13.906157 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e0dcad9_a81c_4505_9d6c_63d3524defbd.slice/crio-a0102d520bf41e16d01ef7d03915fda6747dfd778c3625954c7363cbf6073ec2 WatchSource:0}: Error finding container a0102d520bf41e16d01ef7d03915fda6747dfd778c3625954c7363cbf6073ec2: Status 404 returned error can't find the container with id a0102d520bf41e16d01ef7d03915fda6747dfd778c3625954c7363cbf6073ec2 Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.027909 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.104149 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bpqmf"] Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.104428 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerName="dnsmasq-dns" containerID="cri-o://39d680e2d9e3a8c17c894b4e77955094eadf6d8f8e64a2ac8747d44b97b08508" gracePeriod=10 Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.230232 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerStarted","Data":"9ee88241784fb7a6c4023ca1e41ac8f04bf174f3aa108256a133735b3824bd22"} Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.231550 
4809 generic.go:334] "Generic (PLEG): container finished" podID="0e0dcad9-a81c-4505-9d6c-63d3524defbd" containerID="fc85fbb277cd26a0f8bd99a6b7877dc8bca70ef4b64e3b25ffd83e0f294578ef" exitCode=0 Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.231596 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" event={"ID":"0e0dcad9-a81c-4505-9d6c-63d3524defbd","Type":"ContainerDied","Data":"fc85fbb277cd26a0f8bd99a6b7877dc8bca70ef4b64e3b25ffd83e0f294578ef"} Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.231611 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" event={"ID":"0e0dcad9-a81c-4505-9d6c-63d3524defbd","Type":"ContainerStarted","Data":"a0102d520bf41e16d01ef7d03915fda6747dfd778c3625954c7363cbf6073ec2"} Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.233347 4809 generic.go:334] "Generic (PLEG): container finished" podID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerID="39d680e2d9e3a8c17c894b4e77955094eadf6d8f8e64a2ac8747d44b97b08508" exitCode=0 Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.233370 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" event={"ID":"69cd5ddd-031a-4a2f-993c-0e6ed61cff30","Type":"ContainerDied","Data":"39d680e2d9e3a8c17c894b4e77955094eadf6d8f8e64a2ac8747d44b97b08508"} Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.566609 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.596002 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-config\") pod \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.596269 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9znkc\" (UniqueName: \"kubernetes.io/projected/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-kube-api-access-9znkc\") pod \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.596315 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-dns-svc\") pod \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\" (UID: \"69cd5ddd-031a-4a2f-993c-0e6ed61cff30\") " Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.604905 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-kube-api-access-9znkc" (OuterVolumeSpecName: "kube-api-access-9znkc") pod "69cd5ddd-031a-4a2f-993c-0e6ed61cff30" (UID: "69cd5ddd-031a-4a2f-993c-0e6ed61cff30"). InnerVolumeSpecName "kube-api-access-9znkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.657521 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "69cd5ddd-031a-4a2f-993c-0e6ed61cff30" (UID: "69cd5ddd-031a-4a2f-993c-0e6ed61cff30"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.673608 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-config" (OuterVolumeSpecName: "config") pod "69cd5ddd-031a-4a2f-993c-0e6ed61cff30" (UID: "69cd5ddd-031a-4a2f-993c-0e6ed61cff30"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.701783 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9znkc\" (UniqueName: \"kubernetes.io/projected/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-kube-api-access-9znkc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.701822 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:14 crc kubenswrapper[4809]: I0930 00:28:14.701834 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69cd5ddd-031a-4a2f-993c-0e6ed61cff30-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.242484 4809 generic.go:334] "Generic (PLEG): container finished" podID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerID="d069e4cc0bde9e03aa2c4113f1c7695e4fca7141970b029715da52511e304260" exitCode=0 Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.242568 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d7616359-f18a-4fba-b35a-327e65a0c05d","Type":"ContainerDied","Data":"d069e4cc0bde9e03aa2c4113f1c7695e4fca7141970b029715da52511e304260"} Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.245624 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" event={"ID":"69cd5ddd-031a-4a2f-993c-0e6ed61cff30","Type":"ContainerDied","Data":"be96a0c29e3a819d1296fcb1c64bde912bb736dd52d65755fe7922529cbff68a"} Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.245928 4809 scope.go:117] "RemoveContainer" containerID="39d680e2d9e3a8c17c894b4e77955094eadf6d8f8e64a2ac8747d44b97b08508" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.246842 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bpqmf" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.246927 4809 generic.go:334] "Generic (PLEG): container finished" podID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerID="743f47fb70d0bb64c618e80220574e87e303504d35695209df55120b5be8211a" exitCode=0 Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.246971 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"095a7ca7-bda1-498c-8a6d-16de67eb0a70","Type":"ContainerDied","Data":"743f47fb70d0bb64c618e80220574e87e303504d35695209df55120b5be8211a"} Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.283982 4809 scope.go:117] "RemoveContainer" containerID="44937d2ce766c94bcfdfb5f438244dee29404305246ff4def965881e41adc453" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.330897 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bpqmf"] Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.341116 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bpqmf"] Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.699912 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.709488 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" path="/var/lib/kubelet/pods/69cd5ddd-031a-4a2f-993c-0e6ed61cff30/volumes" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.822671 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl2dg\" (UniqueName: \"kubernetes.io/projected/0e0dcad9-a81c-4505-9d6c-63d3524defbd-kube-api-access-xl2dg\") pod \"0e0dcad9-a81c-4505-9d6c-63d3524defbd\" (UID: \"0e0dcad9-a81c-4505-9d6c-63d3524defbd\") " Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.827943 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e0dcad9-a81c-4505-9d6c-63d3524defbd-kube-api-access-xl2dg" (OuterVolumeSpecName: "kube-api-access-xl2dg") pod "0e0dcad9-a81c-4505-9d6c-63d3524defbd" (UID: "0e0dcad9-a81c-4505-9d6c-63d3524defbd"). InnerVolumeSpecName "kube-api-access-xl2dg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:15 crc kubenswrapper[4809]: I0930 00:28:15.925096 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl2dg\" (UniqueName: \"kubernetes.io/projected/0e0dcad9-a81c-4505-9d6c-63d3524defbd-kube-api-access-xl2dg\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.260429 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"095a7ca7-bda1-498c-8a6d-16de67eb0a70","Type":"ContainerStarted","Data":"6f5b6d99440ced337d2ecb0484994816df37c9fafccfc3fbb4ee0f9f01f0fef3"} Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.263165 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d7616359-f18a-4fba-b35a-327e65a0c05d","Type":"ContainerStarted","Data":"1fb99b015248034a11488608f3491637cb199733b5aecc2399cc1298b976af4b"} Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.263603 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.278962 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" event={"ID":"0e0dcad9-a81c-4505-9d6c-63d3524defbd","Type":"ContainerDied","Data":"a0102d520bf41e16d01ef7d03915fda6747dfd778c3625954c7363cbf6073ec2"} Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.279004 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0102d520bf41e16d01ef7d03915fda6747dfd778c3625954c7363cbf6073ec2" Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.279071 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-w7ngf" Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.293947 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=49.071676453 podStartE2EDuration="50.293926781s" podCreationTimestamp="2025-09-30 00:27:26 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.370263759 +0000 UTC m=+1113.406513167" lastFinishedPulling="2025-09-30 00:27:43.592514087 +0000 UTC m=+1114.628763495" observedRunningTime="2025-09-30 00:28:16.283755297 +0000 UTC m=+1147.320004725" watchObservedRunningTime="2025-09-30 00:28:16.293926781 +0000 UTC m=+1147.330176199" Sep 30 00:28:16 crc kubenswrapper[4809]: I0930 00:28:16.315591 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=47.304677467 podStartE2EDuration="50.315567754s" podCreationTimestamp="2025-09-30 00:27:26 +0000 UTC" firstStartedPulling="2025-09-30 00:27:40.577324625 +0000 UTC m=+1111.613574033" lastFinishedPulling="2025-09-30 00:27:43.588214912 +0000 UTC m=+1114.624464320" observedRunningTime="2025-09-30 00:28:16.308269528 +0000 UTC m=+1147.344518936" watchObservedRunningTime="2025-09-30 00:28:16.315567754 +0000 UTC m=+1147.351817172" Sep 30 00:28:17 crc kubenswrapper[4809]: I0930 00:28:17.313301 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerStarted","Data":"26cc8d2f916f76c2e824c2de084e50d66a153de20bab59c2b0f78249460b18d4"} Sep 30 00:28:17 crc kubenswrapper[4809]: I0930 00:28:17.477246 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 00:28:18 crc kubenswrapper[4809]: I0930 00:28:18.322835 4809 generic.go:334] "Generic (PLEG): container finished" podID="be3e3acf-e816-4c7d-a70e-1f51262676be" containerID="f442b944b8ff7707051eb2d5b49a9cb503e97113e587015485afcfc36bb380c5" exitCode=0 Sep 30 00:28:18 crc kubenswrapper[4809]: I0930 00:28:18.322915 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxwsm" event={"ID":"be3e3acf-e816-4c7d-a70e-1f51262676be","Type":"ContainerDied","Data":"f442b944b8ff7707051eb2d5b49a9cb503e97113e587015485afcfc36bb380c5"} Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.730422 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.764197 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911339 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfxkc\" (UniqueName: \"kubernetes.io/projected/be3e3acf-e816-4c7d-a70e-1f51262676be-kube-api-access-hfxkc\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911419 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-swiftconf\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911528 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-scripts\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911671 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/be3e3acf-e816-4c7d-a70e-1f51262676be-etc-swift\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911719 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-dispersionconf\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911798 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-ring-data-devices\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.911869 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-combined-ca-bundle\") pod \"be3e3acf-e816-4c7d-a70e-1f51262676be\" (UID: \"be3e3acf-e816-4c7d-a70e-1f51262676be\") " Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.913775 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.914008 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be3e3acf-e816-4c7d-a70e-1f51262676be-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.919188 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be3e3acf-e816-4c7d-a70e-1f51262676be-kube-api-access-hfxkc" (OuterVolumeSpecName: "kube-api-access-hfxkc") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "kube-api-access-hfxkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.922503 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.941046 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-78fb9bb869-z7t6w" podUID="fbd086e6-14a9-4b1d-8586-b937b64dfa28" containerName="console" containerID="cri-o://0df70012ab4141c837123ccd7608c66b97e97cae637eec858e6cb5f79e74f00f" gracePeriod=15 Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.948184 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.949919 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-scripts" (OuterVolumeSpecName: "scripts") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:19 crc kubenswrapper[4809]: I0930 00:28:19.954892 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "be3e3acf-e816-4c7d-a70e-1f51262676be" (UID: "be3e3acf-e816-4c7d-a70e-1f51262676be"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.015686 4809 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.016183 4809 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.016197 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.016210 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfxkc\" (UniqueName: \"kubernetes.io/projected/be3e3acf-e816-4c7d-a70e-1f51262676be-kube-api-access-hfxkc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.016224 4809 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/be3e3acf-e816-4c7d-a70e-1f51262676be-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.016238 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be3e3acf-e816-4c7d-a70e-1f51262676be-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.016250 4809 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/be3e3acf-e816-4c7d-a70e-1f51262676be-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.350772 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-78fb9bb869-z7t6w_fbd086e6-14a9-4b1d-8586-b937b64dfa28/console/0.log" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.350930 4809 generic.go:334] "Generic (PLEG): container finished" podID="fbd086e6-14a9-4b1d-8586-b937b64dfa28" containerID="0df70012ab4141c837123ccd7608c66b97e97cae637eec858e6cb5f79e74f00f" exitCode=2 Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.351026 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78fb9bb869-z7t6w" event={"ID":"fbd086e6-14a9-4b1d-8586-b937b64dfa28","Type":"ContainerDied","Data":"0df70012ab4141c837123ccd7608c66b97e97cae637eec858e6cb5f79e74f00f"} Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.352711 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxwsm" event={"ID":"be3e3acf-e816-4c7d-a70e-1f51262676be","Type":"ContainerDied","Data":"9dde0914023c02a9f125eff8ca95786c5f3eea8a6708d6aa5baa06d57cbdf094"} Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.352768 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dde0914023c02a9f125eff8ca95786c5f3eea8a6708d6aa5baa06d57cbdf094" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.352783 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-rxwsm" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.528583 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.535351 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f-etc-swift\") pod \"swift-storage-0\" (UID: \"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f\") " pod="openstack/swift-storage-0" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.708328 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.791152 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-bgw8m"] Sep 30 00:28:20 crc kubenswrapper[4809]: E0930 00:28:20.791825 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be3e3acf-e816-4c7d-a70e-1f51262676be" containerName="swift-ring-rebalance" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.791842 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="be3e3acf-e816-4c7d-a70e-1f51262676be" containerName="swift-ring-rebalance" Sep 30 00:28:20 crc kubenswrapper[4809]: E0930 00:28:20.791863 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerName="init" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.791873 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerName="init" Sep 30 00:28:20 crc kubenswrapper[4809]: E0930 00:28:20.791902 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e0dcad9-a81c-4505-9d6c-63d3524defbd" containerName="mariadb-database-create" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.791910 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e0dcad9-a81c-4505-9d6c-63d3524defbd" containerName="mariadb-database-create" Sep 30 00:28:20 crc kubenswrapper[4809]: E0930 00:28:20.791927 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerName="dnsmasq-dns" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.791935 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerName="dnsmasq-dns" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.792134 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="be3e3acf-e816-4c7d-a70e-1f51262676be" containerName="swift-ring-rebalance" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.792164 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e0dcad9-a81c-4505-9d6c-63d3524defbd" containerName="mariadb-database-create" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.792178 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="69cd5ddd-031a-4a2f-993c-0e6ed61cff30" containerName="dnsmasq-dns" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.792921 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.805330 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-bgw8m"] Sep 30 00:28:20 crc kubenswrapper[4809]: I0930 00:28:20.939204 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txg2f\" (UniqueName: \"kubernetes.io/projected/ae496d32-ef13-44cc-81aa-06be39a6cb4b-kube-api-access-txg2f\") pod \"keystone-db-create-bgw8m\" (UID: \"ae496d32-ef13-44cc-81aa-06be39a6cb4b\") " pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.041755 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txg2f\" (UniqueName: \"kubernetes.io/projected/ae496d32-ef13-44cc-81aa-06be39a6cb4b-kube-api-access-txg2f\") pod \"keystone-db-create-bgw8m\" (UID: \"ae496d32-ef13-44cc-81aa-06be39a6cb4b\") " pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.064551 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txg2f\" (UniqueName: \"kubernetes.io/projected/ae496d32-ef13-44cc-81aa-06be39a6cb4b-kube-api-access-txg2f\") pod \"keystone-db-create-bgw8m\" (UID: \"ae496d32-ef13-44cc-81aa-06be39a6cb4b\") " pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.114688 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.131429 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tbrzd"] Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.132894 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.147061 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tbrzd"] Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.245153 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k54cf\" (UniqueName: \"kubernetes.io/projected/66b5e01c-bf65-4303-a4b6-20f2fd145710-kube-api-access-k54cf\") pod \"placement-db-create-tbrzd\" (UID: \"66b5e01c-bf65-4303-a4b6-20f2fd145710\") " pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.346993 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k54cf\" (UniqueName: \"kubernetes.io/projected/66b5e01c-bf65-4303-a4b6-20f2fd145710-kube-api-access-k54cf\") pod \"placement-db-create-tbrzd\" (UID: \"66b5e01c-bf65-4303-a4b6-20f2fd145710\") " pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.357977 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-p59l6"] Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.359122 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-p59l6" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.368576 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-p59l6"] Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.376186 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k54cf\" (UniqueName: \"kubernetes.io/projected/66b5e01c-bf65-4303-a4b6-20f2fd145710-kube-api-access-k54cf\") pod \"placement-db-create-tbrzd\" (UID: \"66b5e01c-bf65-4303-a4b6-20f2fd145710\") " pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.513773 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.550336 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktmbp\" (UniqueName: \"kubernetes.io/projected/5b39da91-0d25-4093-babd-f7ae25503f07-kube-api-access-ktmbp\") pod \"glance-db-create-p59l6\" (UID: \"5b39da91-0d25-4093-babd-f7ae25503f07\") " pod="openstack/glance-db-create-p59l6" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.652048 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktmbp\" (UniqueName: \"kubernetes.io/projected/5b39da91-0d25-4093-babd-f7ae25503f07-kube-api-access-ktmbp\") pod \"glance-db-create-p59l6\" (UID: \"5b39da91-0d25-4093-babd-f7ae25503f07\") " pod="openstack/glance-db-create-p59l6" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.672269 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktmbp\" (UniqueName: \"kubernetes.io/projected/5b39da91-0d25-4093-babd-f7ae25503f07-kube-api-access-ktmbp\") pod \"glance-db-create-p59l6\" (UID: \"5b39da91-0d25-4093-babd-f7ae25503f07\") " pod="openstack/glance-db-create-p59l6" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.713385 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-p59l6" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.948996 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-78fb9bb869-z7t6w_fbd086e6-14a9-4b1d-8586-b937b64dfa28/console/0.log" Sep 30 00:28:21 crc kubenswrapper[4809]: I0930 00:28:21.949326 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058423 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-oauth-config\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058561 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-serving-cert\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058625 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-config\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058698 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-trusted-ca-bundle\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058730 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-oauth-serving-cert\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058834 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbpm4\" (UniqueName: \"kubernetes.io/projected/fbd086e6-14a9-4b1d-8586-b937b64dfa28-kube-api-access-rbpm4\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.058884 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-service-ca\") pod \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\" (UID: \"fbd086e6-14a9-4b1d-8586-b937b64dfa28\") " Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.059560 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-config" (OuterVolumeSpecName: "console-config") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.059582 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.059603 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-service-ca" (OuterVolumeSpecName: "service-ca") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.059613 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.063836 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.064778 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbd086e6-14a9-4b1d-8586-b937b64dfa28-kube-api-access-rbpm4" (OuterVolumeSpecName: "kube-api-access-rbpm4") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "kube-api-access-rbpm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.064879 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "fbd086e6-14a9-4b1d-8586-b937b64dfa28" (UID: "fbd086e6-14a9-4b1d-8586-b937b64dfa28"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160426 4809 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160462 4809 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160472 4809 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160484 4809 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160493 4809 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160501 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbpm4\" (UniqueName: \"kubernetes.io/projected/fbd086e6-14a9-4b1d-8586-b937b64dfa28-kube-api-access-rbpm4\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.160513 4809 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbd086e6-14a9-4b1d-8586-b937b64dfa28-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.378180 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-78fb9bb869-z7t6w_fbd086e6-14a9-4b1d-8586-b937b64dfa28/console/0.log" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.378248 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78fb9bb869-z7t6w" event={"ID":"fbd086e6-14a9-4b1d-8586-b937b64dfa28","Type":"ContainerDied","Data":"f5e9c3f07726cd7c0bebb46f9ddd906887acfbc16fdbdbf678173bab2d3d1d0e"} Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.378285 4809 scope.go:117] "RemoveContainer" containerID="0df70012ab4141c837123ccd7608c66b97e97cae637eec858e6cb5f79e74f00f" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.378415 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-78fb9bb869-z7t6w" Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.424604 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-78fb9bb869-z7t6w"] Sep 30 00:28:22 crc kubenswrapper[4809]: I0930 00:28:22.437094 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-78fb9bb869-z7t6w"] Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.143789 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-f99b-account-create-j64pv"] Sep 30 00:28:23 crc kubenswrapper[4809]: E0930 00:28:23.147620 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd086e6-14a9-4b1d-8586-b937b64dfa28" containerName="console" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.147658 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd086e6-14a9-4b1d-8586-b937b64dfa28" containerName="console" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.147863 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbd086e6-14a9-4b1d-8586-b937b64dfa28" containerName="console" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.148726 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.151043 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-openstack-db-secret" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.156216 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-f99b-account-create-j64pv"] Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.270241 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tbrzd"] Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.280091 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c92rj\" (UniqueName: \"kubernetes.io/projected/83ce5843-8bf2-4db6-b80e-dc6aade1f47d-kube-api-access-c92rj\") pod \"mysqld-exporter-f99b-account-create-j64pv\" (UID: \"83ce5843-8bf2-4db6-b80e-dc6aade1f47d\") " pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.284042 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-bgw8m"] Sep 30 00:28:23 crc kubenswrapper[4809]: W0930 00:28:23.293668 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae496d32_ef13_44cc_81aa_06be39a6cb4b.slice/crio-6fc2f0f2015702d2ab7b9e7088dffa5753faa48cbddb710d99fc654f7e42736f WatchSource:0}: Error finding container 6fc2f0f2015702d2ab7b9e7088dffa5753faa48cbddb710d99fc654f7e42736f: Status 404 returned error can't find the container with id 6fc2f0f2015702d2ab7b9e7088dffa5753faa48cbddb710d99fc654f7e42736f Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.382024 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c92rj\" (UniqueName: \"kubernetes.io/projected/83ce5843-8bf2-4db6-b80e-dc6aade1f47d-kube-api-access-c92rj\") pod \"mysqld-exporter-f99b-account-create-j64pv\" (UID: \"83ce5843-8bf2-4db6-b80e-dc6aade1f47d\") " pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.389599 4809 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/placement-db-create-tbrzd" event={"ID":"66b5e01c-bf65-4303-a4b6-20f2fd145710","Type":"ContainerStarted","Data":"eb270b39d20738b0b0025d44d3a6cdc20a6bf3132209424d7282cf50ae4e0e50"} Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.390984 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-bgw8m" event={"ID":"ae496d32-ef13-44cc-81aa-06be39a6cb4b","Type":"ContainerStarted","Data":"6fc2f0f2015702d2ab7b9e7088dffa5753faa48cbddb710d99fc654f7e42736f"} Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.408927 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c92rj\" (UniqueName: \"kubernetes.io/projected/83ce5843-8bf2-4db6-b80e-dc6aade1f47d-kube-api-access-c92rj\") pod \"mysqld-exporter-f99b-account-create-j64pv\" (UID: \"83ce5843-8bf2-4db6-b80e-dc6aade1f47d\") " pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.469212 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.500120 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-p59l6"] Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.598802 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 00:28:23 crc kubenswrapper[4809]: W0930 00:28:23.612614 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f2a0baa_5f99_4c6d_a1bd_9fc7cadb1b8f.slice/crio-38978cd4832aa6e424fd67b142153addcf8dc8d2caac076549d436ebc3d7a374 WatchSource:0}: Error finding container 38978cd4832aa6e424fd67b142153addcf8dc8d2caac076549d436ebc3d7a374: Status 404 returned error can't find the container with id 38978cd4832aa6e424fd67b142153addcf8dc8d2caac076549d436ebc3d7a374 Sep 30 00:28:23 crc kubenswrapper[4809]: I0930 00:28:23.744741 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbd086e6-14a9-4b1d-8586-b937b64dfa28" path="/var/lib/kubelet/pods/fbd086e6-14a9-4b1d-8586-b937b64dfa28/volumes" Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.008470 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-f99b-account-create-j64pv"] Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.411984 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" event={"ID":"83ce5843-8bf2-4db6-b80e-dc6aade1f47d","Type":"ContainerStarted","Data":"f69558d61d74803d53635d2c87556a14a4b0922e2f9847e178618d8f48a2e10a"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.415444 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerStarted","Data":"2e848be59263531c111fd1cfca53973e57b85a3e7187008869aaba0f8151c9bb"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.416767 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tbrzd" event={"ID":"66b5e01c-bf65-4303-a4b6-20f2fd145710","Type":"ContainerStarted","Data":"e276e0c737f10eb66070179e438def9a33f07a875e417d82ec926bf59ab6d08a"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.418490 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"38978cd4832aa6e424fd67b142153addcf8dc8d2caac076549d436ebc3d7a374"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.422427 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-bgw8m" event={"ID":"ae496d32-ef13-44cc-81aa-06be39a6cb4b","Type":"ContainerStarted","Data":"7594a4bebcf6878b7b71dc4cead1eb2cadf8d93eb563c2f0b74ef1d47d2bec31"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.424167 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p59l6" event={"ID":"5b39da91-0d25-4093-babd-f7ae25503f07","Type":"ContainerStarted","Data":"7bcc1a0ad45e219b1362f9dd13065a578aa2d772cbc8a2825f4be2184868d92d"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.424211 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p59l6" event={"ID":"5b39da91-0d25-4093-babd-f7ae25503f07","Type":"ContainerStarted","Data":"f799f51c9f3157e0cac4239e217b0e9814a82413f708907e1cad3f0acb4cf6f3"} Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.436692 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=11.012355698 podStartE2EDuration="51.436675495s" podCreationTimestamp="2025-09-30 00:27:33 +0000 UTC" firstStartedPulling="2025-09-30 00:27:42.322444731 +0000 UTC m=+1113.358694139" lastFinishedPulling="2025-09-30 00:28:22.746764528 +0000 UTC m=+1153.783013936" observedRunningTime="2025-09-30 00:28:24.433439838 +0000 UTC m=+1155.469689256" watchObservedRunningTime="2025-09-30 00:28:24.436675495 +0000 UTC m=+1155.472924903" Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.451513 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-tbrzd" podStartSLOduration=3.451495745 podStartE2EDuration="3.451495745s" podCreationTimestamp="2025-09-30 00:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:24.447283881 +0000 UTC m=+1155.483533289" watchObservedRunningTime="2025-09-30 00:28:24.451495745 +0000 UTC m=+1155.487745153" Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.463437 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-p59l6" podStartSLOduration=3.463416526 podStartE2EDuration="3.463416526s" podCreationTimestamp="2025-09-30 00:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:24.462009438 +0000 UTC m=+1155.498258846" watchObservedRunningTime="2025-09-30 00:28:24.463416526 +0000 UTC m=+1155.499665934" Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.480725 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-bgw8m" podStartSLOduration=4.480701102 podStartE2EDuration="4.480701102s" podCreationTimestamp="2025-09-30 00:28:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:24.474393592 +0000 UTC m=+1155.510643000" watchObservedRunningTime="2025-09-30 00:28:24.480701102 +0000 UTC m=+1155.516950510" Sep 30 00:28:24 crc kubenswrapper[4809]: I0930 00:28:24.489910 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:25 crc kubenswrapper[4809]: I0930 00:28:25.431869 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" event={"ID":"83ce5843-8bf2-4db6-b80e-dc6aade1f47d","Type":"ContainerStarted","Data":"9a1dadb829781ef631ddbd69538f1611e0e68892d41a28d305f4b9d010d10a3f"} Sep 30 00:28:25 crc kubenswrapper[4809]: I0930 00:28:25.455393 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" podStartSLOduration=2.455375731 podStartE2EDuration="2.455375731s" podCreationTimestamp="2025-09-30 00:28:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:25.445283018 +0000 UTC m=+1156.481532436" watchObservedRunningTime="2025-09-30 00:28:25.455375731 +0000 UTC m=+1156.491625139" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.226159 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-swbnb" podUID="40f19224-d223-4f7c-ad39-3afc9a8c18b1" containerName="ovn-controller" probeResult="failure" output=< Sep 30 00:28:27 crc kubenswrapper[4809]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 00:28:27 crc kubenswrapper[4809]: > Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.239337 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.239436 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wgkv7" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.467124 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-swbnb-config-jkg5h"] Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.468493 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.470898 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.478836 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.120:5671: connect: connection refused" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.482323 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-swbnb-config-jkg5h"] Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.573695 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-log-ovn\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.573750 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.573783 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-scripts\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.573809 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run-ovn\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.573830 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqg7k\" (UniqueName: \"kubernetes.io/projected/78b4f4d8-e190-4075-a365-065d02b9f95e-kube-api-access-lqg7k\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.573906 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-additional-scripts\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.675815 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-additional-scripts\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: 
\"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.675925 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-log-ovn\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.675962 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.675998 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-scripts\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.676021 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run-ovn\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.676040 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqg7k\" (UniqueName: \"kubernetes.io/projected/78b4f4d8-e190-4075-a365-065d02b9f95e-kube-api-access-lqg7k\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.676345 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-log-ovn\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.676442 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.676467 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run-ovn\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.677069 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-additional-scripts\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: 
\"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.678122 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-scripts\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.752448 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqg7k\" (UniqueName: \"kubernetes.io/projected/78b4f4d8-e190-4075-a365-065d02b9f95e-kube-api-access-lqg7k\") pod \"ovn-controller-swbnb-config-jkg5h\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.792007 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:27 crc kubenswrapper[4809]: I0930 00:28:27.825568 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.121:5671: connect: connection refused" Sep 30 00:28:28 crc kubenswrapper[4809]: I0930 00:28:28.462166 4809 generic.go:334] "Generic (PLEG): container finished" podID="66b5e01c-bf65-4303-a4b6-20f2fd145710" containerID="e276e0c737f10eb66070179e438def9a33f07a875e417d82ec926bf59ab6d08a" exitCode=0 Sep 30 00:28:28 crc kubenswrapper[4809]: I0930 00:28:28.462516 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tbrzd" event={"ID":"66b5e01c-bf65-4303-a4b6-20f2fd145710","Type":"ContainerDied","Data":"e276e0c737f10eb66070179e438def9a33f07a875e417d82ec926bf59ab6d08a"} Sep 30 00:28:28 crc kubenswrapper[4809]: I0930 00:28:28.871542 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-swbnb-config-jkg5h"] Sep 30 00:28:28 crc kubenswrapper[4809]: W0930 00:28:28.884186 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78b4f4d8_e190_4075_a365_065d02b9f95e.slice/crio-fe4682c46d2d2bb132530fbc351e315ff70242c61c9ca37b3b2c92b194e683d5 WatchSource:0}: Error finding container fe4682c46d2d2bb132530fbc351e315ff70242c61c9ca37b3b2c92b194e683d5: Status 404 returned error can't find the container with id fe4682c46d2d2bb132530fbc351e315ff70242c61c9ca37b3b2c92b194e683d5 Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.477919 4809 generic.go:334] "Generic (PLEG): container finished" podID="ae496d32-ef13-44cc-81aa-06be39a6cb4b" containerID="7594a4bebcf6878b7b71dc4cead1eb2cadf8d93eb563c2f0b74ef1d47d2bec31" exitCode=0 Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.478034 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-bgw8m" event={"ID":"ae496d32-ef13-44cc-81aa-06be39a6cb4b","Type":"ContainerDied","Data":"7594a4bebcf6878b7b71dc4cead1eb2cadf8d93eb563c2f0b74ef1d47d2bec31"} Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.479954 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-swbnb-config-jkg5h" event={"ID":"78b4f4d8-e190-4075-a365-065d02b9f95e","Type":"ContainerStarted","Data":"d468d242c880b6878e8f06191fbe849fca2c5010b6e3feb8f14bfee7c5b70917"} Sep 30 
00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.479994 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-swbnb-config-jkg5h" event={"ID":"78b4f4d8-e190-4075-a365-065d02b9f95e","Type":"ContainerStarted","Data":"fe4682c46d2d2bb132530fbc351e315ff70242c61c9ca37b3b2c92b194e683d5"} Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.481418 4809 generic.go:334] "Generic (PLEG): container finished" podID="5b39da91-0d25-4093-babd-f7ae25503f07" containerID="7bcc1a0ad45e219b1362f9dd13065a578aa2d772cbc8a2825f4be2184868d92d" exitCode=0 Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.481475 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p59l6" event={"ID":"5b39da91-0d25-4093-babd-f7ae25503f07","Type":"ContainerDied","Data":"7bcc1a0ad45e219b1362f9dd13065a578aa2d772cbc8a2825f4be2184868d92d"} Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.483634 4809 generic.go:334] "Generic (PLEG): container finished" podID="83ce5843-8bf2-4db6-b80e-dc6aade1f47d" containerID="9a1dadb829781ef631ddbd69538f1611e0e68892d41a28d305f4b9d010d10a3f" exitCode=0 Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.483712 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" event={"ID":"83ce5843-8bf2-4db6-b80e-dc6aade1f47d","Type":"ContainerDied","Data":"9a1dadb829781ef631ddbd69538f1611e0e68892d41a28d305f4b9d010d10a3f"} Sep 30 00:28:29 crc kubenswrapper[4809]: I0930 00:28:29.530036 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-swbnb-config-jkg5h" podStartSLOduration=2.5300199660000002 podStartE2EDuration="2.530019966s" podCreationTimestamp="2025-09-30 00:28:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:29.518322521 +0000 UTC m=+1160.554571929" watchObservedRunningTime="2025-09-30 00:28:29.530019966 +0000 UTC m=+1160.566269374" Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.067793 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.120464 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k54cf\" (UniqueName: \"kubernetes.io/projected/66b5e01c-bf65-4303-a4b6-20f2fd145710-kube-api-access-k54cf\") pod \"66b5e01c-bf65-4303-a4b6-20f2fd145710\" (UID: \"66b5e01c-bf65-4303-a4b6-20f2fd145710\") " Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.128741 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66b5e01c-bf65-4303-a4b6-20f2fd145710-kube-api-access-k54cf" (OuterVolumeSpecName: "kube-api-access-k54cf") pod "66b5e01c-bf65-4303-a4b6-20f2fd145710" (UID: "66b5e01c-bf65-4303-a4b6-20f2fd145710"). InnerVolumeSpecName "kube-api-access-k54cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.223874 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k54cf\" (UniqueName: \"kubernetes.io/projected/66b5e01c-bf65-4303-a4b6-20f2fd145710-kube-api-access-k54cf\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.502633 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tbrzd" event={"ID":"66b5e01c-bf65-4303-a4b6-20f2fd145710","Type":"ContainerDied","Data":"eb270b39d20738b0b0025d44d3a6cdc20a6bf3132209424d7282cf50ae4e0e50"} Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.502694 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb270b39d20738b0b0025d44d3a6cdc20a6bf3132209424d7282cf50ae4e0e50" Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.502766 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tbrzd" Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.514906 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"22ff9a275487f1a8bd768da421606301e519d261bdaa0a017618c5085288c34f"} Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.515231 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"e871a6ce1513d28868d1fdb450a72cb517061e0d941242493f0ce6d6f1816019"} Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.518789 4809 generic.go:334] "Generic (PLEG): container finished" podID="78b4f4d8-e190-4075-a365-065d02b9f95e" containerID="d468d242c880b6878e8f06191fbe849fca2c5010b6e3feb8f14bfee7c5b70917" exitCode=0 Sep 30 00:28:30 crc kubenswrapper[4809]: I0930 00:28:30.518884 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-swbnb-config-jkg5h" event={"ID":"78b4f4d8-e190-4075-a365-065d02b9f95e","Type":"ContainerDied","Data":"d468d242c880b6878e8f06191fbe849fca2c5010b6e3feb8f14bfee7c5b70917"} Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:30.975728 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-p59l6" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.037103 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktmbp\" (UniqueName: \"kubernetes.io/projected/5b39da91-0d25-4093-babd-f7ae25503f07-kube-api-access-ktmbp\") pod \"5b39da91-0d25-4093-babd-f7ae25503f07\" (UID: \"5b39da91-0d25-4093-babd-f7ae25503f07\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.042868 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b39da91-0d25-4093-babd-f7ae25503f07-kube-api-access-ktmbp" (OuterVolumeSpecName: "kube-api-access-ktmbp") pod "5b39da91-0d25-4093-babd-f7ae25503f07" (UID: "5b39da91-0d25-4093-babd-f7ae25503f07"). InnerVolumeSpecName "kube-api-access-ktmbp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.139388 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktmbp\" (UniqueName: \"kubernetes.io/projected/5b39da91-0d25-4093-babd-f7ae25503f07-kube-api-access-ktmbp\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.563143 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"697cc84c370f4353bcf09199648615b821922e5ef6407b39888ae9024b5f2265"} Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.564083 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"5bdf9ca7c14a290c2d2ca13bd6f56c1881656234a314ffef373aee4ab837f870"} Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.568237 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-p59l6" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.568373 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-p59l6" event={"ID":"5b39da91-0d25-4093-babd-f7ae25503f07","Type":"ContainerDied","Data":"f799f51c9f3157e0cac4239e217b0e9814a82413f708907e1cad3f0acb4cf6f3"} Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.568438 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f799f51c9f3157e0cac4239e217b0e9814a82413f708907e1cad3f0acb4cf6f3" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.698419 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.701494 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.755438 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c92rj\" (UniqueName: \"kubernetes.io/projected/83ce5843-8bf2-4db6-b80e-dc6aade1f47d-kube-api-access-c92rj\") pod \"83ce5843-8bf2-4db6-b80e-dc6aade1f47d\" (UID: \"83ce5843-8bf2-4db6-b80e-dc6aade1f47d\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.755541 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txg2f\" (UniqueName: \"kubernetes.io/projected/ae496d32-ef13-44cc-81aa-06be39a6cb4b-kube-api-access-txg2f\") pod \"ae496d32-ef13-44cc-81aa-06be39a6cb4b\" (UID: \"ae496d32-ef13-44cc-81aa-06be39a6cb4b\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.762708 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ce5843-8bf2-4db6-b80e-dc6aade1f47d-kube-api-access-c92rj" (OuterVolumeSpecName: "kube-api-access-c92rj") pod "83ce5843-8bf2-4db6-b80e-dc6aade1f47d" (UID: "83ce5843-8bf2-4db6-b80e-dc6aade1f47d"). InnerVolumeSpecName "kube-api-access-c92rj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.766934 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae496d32-ef13-44cc-81aa-06be39a6cb4b-kube-api-access-txg2f" (OuterVolumeSpecName: "kube-api-access-txg2f") pod "ae496d32-ef13-44cc-81aa-06be39a6cb4b" (UID: "ae496d32-ef13-44cc-81aa-06be39a6cb4b"). InnerVolumeSpecName "kube-api-access-txg2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.857858 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c92rj\" (UniqueName: \"kubernetes.io/projected/83ce5843-8bf2-4db6-b80e-dc6aade1f47d-kube-api-access-c92rj\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.857891 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txg2f\" (UniqueName: \"kubernetes.io/projected/ae496d32-ef13-44cc-81aa-06be39a6cb4b-kube-api-access-txg2f\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.876159 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.959114 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-scripts\") pod \"78b4f4d8-e190-4075-a365-065d02b9f95e\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.959230 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqg7k\" (UniqueName: \"kubernetes.io/projected/78b4f4d8-e190-4075-a365-065d02b9f95e-kube-api-access-lqg7k\") pod \"78b4f4d8-e190-4075-a365-065d02b9f95e\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.959309 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-log-ovn\") pod \"78b4f4d8-e190-4075-a365-065d02b9f95e\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.959351 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-additional-scripts\") pod \"78b4f4d8-e190-4075-a365-065d02b9f95e\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.959479 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run\") pod \"78b4f4d8-e190-4075-a365-065d02b9f95e\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.959532 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run-ovn\") pod \"78b4f4d8-e190-4075-a365-065d02b9f95e\" (UID: \"78b4f4d8-e190-4075-a365-065d02b9f95e\") " Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.960153 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "78b4f4d8-e190-4075-a365-065d02b9f95e" (UID: "78b4f4d8-e190-4075-a365-065d02b9f95e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.960837 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "78b4f4d8-e190-4075-a365-065d02b9f95e" (UID: "78b4f4d8-e190-4075-a365-065d02b9f95e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.960891 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run" (OuterVolumeSpecName: "var-run") pod "78b4f4d8-e190-4075-a365-065d02b9f95e" (UID: "78b4f4d8-e190-4075-a365-065d02b9f95e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.961694 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "78b4f4d8-e190-4075-a365-065d02b9f95e" (UID: "78b4f4d8-e190-4075-a365-065d02b9f95e"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.962172 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-scripts" (OuterVolumeSpecName: "scripts") pod "78b4f4d8-e190-4075-a365-065d02b9f95e" (UID: "78b4f4d8-e190-4075-a365-065d02b9f95e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:31 crc kubenswrapper[4809]: I0930 00:28:31.965262 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78b4f4d8-e190-4075-a365-065d02b9f95e-kube-api-access-lqg7k" (OuterVolumeSpecName: "kube-api-access-lqg7k") pod "78b4f4d8-e190-4075-a365-065d02b9f95e" (UID: "78b4f4d8-e190-4075-a365-065d02b9f95e"). InnerVolumeSpecName "kube-api-access-lqg7k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.062264 4809 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.062301 4809 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.062311 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.062320 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqg7k\" (UniqueName: \"kubernetes.io/projected/78b4f4d8-e190-4075-a365-065d02b9f95e-kube-api-access-lqg7k\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.062333 4809 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78b4f4d8-e190-4075-a365-065d02b9f95e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.062341 4809 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78b4f4d8-e190-4075-a365-065d02b9f95e-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.207448 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-swbnb" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.587709 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-swbnb-config-jkg5h" event={"ID":"78b4f4d8-e190-4075-a365-065d02b9f95e","Type":"ContainerDied","Data":"fe4682c46d2d2bb132530fbc351e315ff70242c61c9ca37b3b2c92b194e683d5"} Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.587985 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe4682c46d2d2bb132530fbc351e315ff70242c61c9ca37b3b2c92b194e683d5" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.587729 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-swbnb-config-jkg5h" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.591036 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" event={"ID":"83ce5843-8bf2-4db6-b80e-dc6aade1f47d","Type":"ContainerDied","Data":"f69558d61d74803d53635d2c87556a14a4b0922e2f9847e178618d8f48a2e10a"} Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.591071 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f69558d61d74803d53635d2c87556a14a4b0922e2f9847e178618d8f48a2e10a" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.591097 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-f99b-account-create-j64pv" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.605757 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-bgw8m" event={"ID":"ae496d32-ef13-44cc-81aa-06be39a6cb4b","Type":"ContainerDied","Data":"6fc2f0f2015702d2ab7b9e7088dffa5753faa48cbddb710d99fc654f7e42736f"} Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.605787 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6fc2f0f2015702d2ab7b9e7088dffa5753faa48cbddb710d99fc654f7e42736f" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.605843 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-bgw8m" Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.991077 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-swbnb-config-jkg5h"] Sep 30 00:28:32 crc kubenswrapper[4809]: I0930 00:28:32.997935 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-swbnb-config-jkg5h"] Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.419247 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-56n4d"] Sep 30 00:28:33 crc kubenswrapper[4809]: E0930 00:28:33.420207 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ce5843-8bf2-4db6-b80e-dc6aade1f47d" containerName="mariadb-account-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.420397 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ce5843-8bf2-4db6-b80e-dc6aade1f47d" containerName="mariadb-account-create" Sep 30 00:28:33 crc kubenswrapper[4809]: E0930 00:28:33.420479 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae496d32-ef13-44cc-81aa-06be39a6cb4b" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.420549 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae496d32-ef13-44cc-81aa-06be39a6cb4b" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: E0930 00:28:33.420656 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b5e01c-bf65-4303-a4b6-20f2fd145710" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.420735 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b5e01c-bf65-4303-a4b6-20f2fd145710" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: E0930 00:28:33.420839 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b39da91-0d25-4093-babd-f7ae25503f07" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.420915 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b39da91-0d25-4093-babd-f7ae25503f07" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: E0930 00:28:33.420993 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b4f4d8-e190-4075-a365-065d02b9f95e" containerName="ovn-config" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.421063 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b4f4d8-e190-4075-a365-065d02b9f95e" containerName="ovn-config" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.421583 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b39da91-0d25-4093-babd-f7ae25503f07" 
containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.421753 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae496d32-ef13-44cc-81aa-06be39a6cb4b" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.421836 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ce5843-8bf2-4db6-b80e-dc6aade1f47d" containerName="mariadb-account-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.421922 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b5e01c-bf65-4303-a4b6-20f2fd145710" containerName="mariadb-database-create" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.421999 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="78b4f4d8-e190-4075-a365-065d02b9f95e" containerName="ovn-config" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.423012 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.446578 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-56n4d"] Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.492106 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwtsf\" (UniqueName: \"kubernetes.io/projected/de0d3f45-aea9-4cd7-9876-0de2feee40f8-kube-api-access-jwtsf\") pod \"mysqld-exporter-openstack-cell1-db-create-56n4d\" (UID: \"de0d3f45-aea9-4cd7-9876-0de2feee40f8\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.593993 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwtsf\" (UniqueName: \"kubernetes.io/projected/de0d3f45-aea9-4cd7-9876-0de2feee40f8-kube-api-access-jwtsf\") pod \"mysqld-exporter-openstack-cell1-db-create-56n4d\" (UID: \"de0d3f45-aea9-4cd7-9876-0de2feee40f8\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.619803 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"955c53c58c4f2d1e14a3ad0f6e592b24d3dad3db08178f42b645fd0d7ef5a20c"} Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.619846 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"0cf30215f1fe8b143215176797fd51cb40f9b71a818fe32f608493553569803c"} Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.619859 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"5ac2c7590c2020b5c61aee98808e28a36fb45fb34a9c5fbbac272a8415be8e84"} Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.619868 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"a85d171e0f6978343c963d5a15a75aa1d1eb9bd744bebb61d5182c06f97b15de"} Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.623209 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwtsf\" (UniqueName: 
\"kubernetes.io/projected/de0d3f45-aea9-4cd7-9876-0de2feee40f8-kube-api-access-jwtsf\") pod \"mysqld-exporter-openstack-cell1-db-create-56n4d\" (UID: \"de0d3f45-aea9-4cd7-9876-0de2feee40f8\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.703025 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78b4f4d8-e190-4075-a365-065d02b9f95e" path="/var/lib/kubelet/pods/78b4f4d8-e190-4075-a365-065d02b9f95e/volumes" Sep 30 00:28:33 crc kubenswrapper[4809]: I0930 00:28:33.751302 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.311190 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-56n4d"] Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.489864 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.492942 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.634809 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" event={"ID":"de0d3f45-aea9-4cd7-9876-0de2feee40f8","Type":"ContainerDied","Data":"069ff1e20ae1561864452edfba68e7df09eb517dbf68daa25bc931209c6d5a6b"} Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.634671 4809 generic.go:334] "Generic (PLEG): container finished" podID="de0d3f45-aea9-4cd7-9876-0de2feee40f8" containerID="069ff1e20ae1561864452edfba68e7df09eb517dbf68daa25bc931209c6d5a6b" exitCode=0 Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.635582 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" event={"ID":"de0d3f45-aea9-4cd7-9876-0de2feee40f8","Type":"ContainerStarted","Data":"7ecf54d50fa535678b4829e2c853f38bf442c60ec64f4cf37cc75a43f9364596"} Sep 30 00:28:34 crc kubenswrapper[4809]: I0930 00:28:34.636729 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:35 crc kubenswrapper[4809]: I0930 00:28:35.654707 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"3326a9a3bb6b7e4da303a402545cb9d0e7df52fc48634f01994520123a19620b"} Sep 30 00:28:35 crc kubenswrapper[4809]: I0930 00:28:35.655069 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"0541547575bfec06c05cf6bdb4a3ee565fc068ee56298e623506070ae379fb0f"} Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.144589 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.239815 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwtsf\" (UniqueName: \"kubernetes.io/projected/de0d3f45-aea9-4cd7-9876-0de2feee40f8-kube-api-access-jwtsf\") pod \"de0d3f45-aea9-4cd7-9876-0de2feee40f8\" (UID: \"de0d3f45-aea9-4cd7-9876-0de2feee40f8\") " Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.246964 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de0d3f45-aea9-4cd7-9876-0de2feee40f8-kube-api-access-jwtsf" (OuterVolumeSpecName: "kube-api-access-jwtsf") pod "de0d3f45-aea9-4cd7-9876-0de2feee40f8" (UID: "de0d3f45-aea9-4cd7-9876-0de2feee40f8"). InnerVolumeSpecName "kube-api-access-jwtsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.342763 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwtsf\" (UniqueName: \"kubernetes.io/projected/de0d3f45-aea9-4cd7-9876-0de2feee40f8-kube-api-access-jwtsf\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.698544 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"89589bfbac666e2adb4f3545264e45873f93fa86a3314642cf2c581b21de32be"} Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.698580 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"1385c57cde46742e6d67fc2327f5102c132fede2aa8fac4c52d0d09999e9ce09"} Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.698589 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"a93c540f7a65c7c7f38a8abc83b23cfb642a6357f5b6a22ac1848c9438ba2d96"} Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.698598 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"6be4c3c95a452392c99ec596496cfe92b1d71d719edc7fbb314a84b14632dee6"} Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.708548 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" event={"ID":"de0d3f45-aea9-4cd7-9876-0de2feee40f8","Type":"ContainerDied","Data":"7ecf54d50fa535678b4829e2c853f38bf442c60ec64f4cf37cc75a43f9364596"} Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.708606 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ecf54d50fa535678b4829e2c853f38bf442c60ec64f4cf37cc75a43f9364596" Sep 30 00:28:36 crc kubenswrapper[4809]: I0930 00:28:36.708737 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-56n4d" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.265572 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.269237 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="prometheus" containerID="cri-o://9ee88241784fb7a6c4023ca1e41ac8f04bf174f3aa108256a133735b3824bd22" gracePeriod=600 Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.269355 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="config-reloader" containerID="cri-o://26cc8d2f916f76c2e824c2de084e50d66a153de20bab59c2b0f78249460b18d4" gracePeriod=600 Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.269419 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="thanos-sidecar" containerID="cri-o://2e848be59263531c111fd1cfca53973e57b85a3e7187008869aaba0f8151c9bb" gracePeriod=600 Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.479191 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.720150 4809 generic.go:334] "Generic (PLEG): container finished" podID="5681f926-bb52-4288-b0cc-ca30e087457d" containerID="2e848be59263531c111fd1cfca53973e57b85a3e7187008869aaba0f8151c9bb" exitCode=0 Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.720191 4809 generic.go:334] "Generic (PLEG): container finished" podID="5681f926-bb52-4288-b0cc-ca30e087457d" containerID="26cc8d2f916f76c2e824c2de084e50d66a153de20bab59c2b0f78249460b18d4" exitCode=0 Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.720201 4809 generic.go:334] "Generic (PLEG): container finished" podID="5681f926-bb52-4288-b0cc-ca30e087457d" containerID="9ee88241784fb7a6c4023ca1e41ac8f04bf174f3aa108256a133735b3824bd22" exitCode=0 Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.720216 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerDied","Data":"2e848be59263531c111fd1cfca53973e57b85a3e7187008869aaba0f8151c9bb"} Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.720266 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerDied","Data":"26cc8d2f916f76c2e824c2de084e50d66a153de20bab59c2b0f78249460b18d4"} Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.720279 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerDied","Data":"9ee88241784fb7a6c4023ca1e41ac8f04bf174f3aa108256a133735b3824bd22"} Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.738826 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f","Type":"ContainerStarted","Data":"48642e40c3dcd5a369cd9ae3731cec9a75160afc5bb61e1327f580e071478b0d"} Sep 30 00:28:37 crc kubenswrapper[4809]: 
I0930 00:28:37.791922 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=23.470754607 podStartE2EDuration="34.791905531s" podCreationTimestamp="2025-09-30 00:28:03 +0000 UTC" firstStartedPulling="2025-09-30 00:28:23.614214398 +0000 UTC m=+1154.650463806" lastFinishedPulling="2025-09-30 00:28:34.935365322 +0000 UTC m=+1165.971614730" observedRunningTime="2025-09-30 00:28:37.786889106 +0000 UTC m=+1168.823138514" watchObservedRunningTime="2025-09-30 00:28:37.791905531 +0000 UTC m=+1168.828154929" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.824837 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.838698 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-cfdjx"] Sep 30 00:28:37 crc kubenswrapper[4809]: E0930 00:28:37.839049 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de0d3f45-aea9-4cd7-9876-0de2feee40f8" containerName="mariadb-database-create" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.839064 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="de0d3f45-aea9-4cd7-9876-0de2feee40f8" containerName="mariadb-database-create" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.839260 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="de0d3f45-aea9-4cd7-9876-0de2feee40f8" containerName="mariadb-database-create" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.839855 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.860212 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cfdjx"] Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.944090 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-shpsr"] Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.946214 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.961748 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-shpsr"] Sep 30 00:28:37 crc kubenswrapper[4809]: I0930 00:28:37.977683 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfvfk\" (UniqueName: \"kubernetes.io/projected/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6-kube-api-access-mfvfk\") pod \"cinder-db-create-cfdjx\" (UID: \"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6\") " pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.080271 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-sdjks"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.082131 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-sdjks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.086529 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzsdz\" (UniqueName: \"kubernetes.io/projected/8f3d710a-877b-4d2b-922a-83aba52d6e9c-kube-api-access-kzsdz\") pod \"barbican-db-create-shpsr\" (UID: \"8f3d710a-877b-4d2b-922a-83aba52d6e9c\") " pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.086624 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfvfk\" (UniqueName: \"kubernetes.io/projected/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6-kube-api-access-mfvfk\") pod \"cinder-db-create-cfdjx\" (UID: \"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6\") " pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.087876 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-sdjks"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.121388 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfvfk\" (UniqueName: \"kubernetes.io/projected/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6-kube-api-access-mfvfk\") pod \"cinder-db-create-cfdjx\" (UID: \"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6\") " pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.159924 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.190836 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzsdz\" (UniqueName: \"kubernetes.io/projected/8f3d710a-877b-4d2b-922a-83aba52d6e9c-kube-api-access-kzsdz\") pod \"barbican-db-create-shpsr\" (UID: \"8f3d710a-877b-4d2b-922a-83aba52d6e9c\") " pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.191211 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j6dn\" (UniqueName: \"kubernetes.io/projected/d346f7a2-3667-4066-975e-9b834ddffcfd-kube-api-access-4j6dn\") pod \"heat-db-create-sdjks\" (UID: \"d346f7a2-3667-4066-975e-9b834ddffcfd\") " pod="openstack/heat-db-create-sdjks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.237438 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-d9jks"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.246866 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzsdz\" (UniqueName: \"kubernetes.io/projected/8f3d710a-877b-4d2b-922a-83aba52d6e9c-kube-api-access-kzsdz\") pod \"barbican-db-create-shpsr\" (UID: \"8f3d710a-877b-4d2b-922a-83aba52d6e9c\") " pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.259960 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-d9jks"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.260065 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.264803 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.278306 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.290725 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-hcb4b"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.291957 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.293048 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j6dn\" (UniqueName: \"kubernetes.io/projected/d346f7a2-3667-4066-975e-9b834ddffcfd-kube-api-access-4j6dn\") pod \"heat-db-create-sdjks\" (UID: \"d346f7a2-3667-4066-975e-9b834ddffcfd\") " pod="openstack/heat-db-create-sdjks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.297916 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hcb4b"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.330630 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j6dn\" (UniqueName: \"kubernetes.io/projected/d346f7a2-3667-4066-975e-9b834ddffcfd-kube-api-access-4j6dn\") pod \"heat-db-create-sdjks\" (UID: \"d346f7a2-3667-4066-975e-9b834ddffcfd\") " pod="openstack/heat-db-create-sdjks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.394910 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.396979 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.397051 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-config\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.397115 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.397284 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp65v\" (UniqueName: 
\"kubernetes.io/projected/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-kube-api-access-rp65v\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.397458 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6xjj\" (UniqueName: \"kubernetes.io/projected/60d6da9e-f759-4557-bc7a-9e73d53728a2-kube-api-access-l6xjj\") pod \"neutron-db-create-hcb4b\" (UID: \"60d6da9e-f759-4557-bc7a-9e73d53728a2\") " pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.397489 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.460342 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.461147 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-sdjks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.501294 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5681f926-bb52-4288-b0cc-ca30e087457d-prometheus-metric-storage-rulefiles-0\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503332 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5681f926-bb52-4288-b0cc-ca30e087457d-config-out\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503481 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503533 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5745h\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-kube-api-access-5745h\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503598 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-thanos-prometheus-http-client-file\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503614 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-config\") pod 
\"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503726 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-web-config\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503756 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-tls-assets\") pod \"5681f926-bb52-4288-b0cc-ca30e087457d\" (UID: \"5681f926-bb52-4288-b0cc-ca30e087457d\") " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.503991 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.504096 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp65v\" (UniqueName: \"kubernetes.io/projected/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-kube-api-access-rp65v\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.504224 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6xjj\" (UniqueName: \"kubernetes.io/projected/60d6da9e-f759-4557-bc7a-9e73d53728a2-kube-api-access-l6xjj\") pod \"neutron-db-create-hcb4b\" (UID: \"60d6da9e-f759-4557-bc7a-9e73d53728a2\") " pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.504249 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.504345 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.504384 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.504451 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-config\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 
00:28:38.507693 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-config\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.509914 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.510856 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.513774 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.518497 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-config" (OuterVolumeSpecName: "config") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.519385 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-kube-api-access-5745h" (OuterVolumeSpecName: "kube-api-access-5745h") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "kube-api-access-5745h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.521145 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.521367 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.522765 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5681f926-bb52-4288-b0cc-ca30e087457d-config-out" (OuterVolumeSpecName: "config-out") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.523721 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5681f926-bb52-4288-b0cc-ca30e087457d-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.531164 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.542124 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6xjj\" (UniqueName: \"kubernetes.io/projected/60d6da9e-f759-4557-bc7a-9e73d53728a2-kube-api-access-l6xjj\") pod \"neutron-db-create-hcb4b\" (UID: \"60d6da9e-f759-4557-bc7a-9e73d53728a2\") " pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.544181 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp65v\" (UniqueName: \"kubernetes.io/projected/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-kube-api-access-rp65v\") pod \"dnsmasq-dns-77585f5f8c-d9jks\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.562277 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "pvc-c58cec56-19b8-4689-9986-ba07a43850e3". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.568138 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-web-config" (OuterVolumeSpecName: "web-config") pod "5681f926-bb52-4288-b0cc-ca30e087457d" (UID: "5681f926-bb52-4288-b0cc-ca30e087457d"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606129 4809 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-web-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606309 4809 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-tls-assets\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606361 4809 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5681f926-bb52-4288-b0cc-ca30e087457d-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606441 4809 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5681f926-bb52-4288-b0cc-ca30e087457d-config-out\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606519 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") on node \"crc\" " Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606573 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5745h\" (UniqueName: \"kubernetes.io/projected/5681f926-bb52-4288-b0cc-ca30e087457d-kube-api-access-5745h\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606623 4809 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.606688 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5681f926-bb52-4288-b0cc-ca30e087457d-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.653248 4809 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.653565 4809 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c58cec56-19b8-4689-9986-ba07a43850e3" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3") on node "crc" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.684520 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.708247 4809 reconciler_common.go:293] "Volume detached for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.721254 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.762880 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5681f926-bb52-4288-b0cc-ca30e087457d","Type":"ContainerDied","Data":"279b74d25ec6d6c76f35d971da22e2f80b1fe1e77e31dbe22f583408171c8511"} Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.762954 4809 scope.go:117] "RemoveContainer" containerID="2e848be59263531c111fd1cfca53973e57b85a3e7187008869aaba0f8151c9bb" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.763059 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.803041 4809 scope.go:117] "RemoveContainer" containerID="26cc8d2f916f76c2e824c2de084e50d66a153de20bab59c2b0f78249460b18d4" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.835097 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.849252 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.865795 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:28:38 crc kubenswrapper[4809]: E0930 00:28:38.866180 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="init-config-reloader" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866192 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="init-config-reloader" Sep 30 00:28:38 crc kubenswrapper[4809]: E0930 00:28:38.866206 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="config-reloader" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866214 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="config-reloader" Sep 30 00:28:38 crc kubenswrapper[4809]: E0930 00:28:38.866226 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="prometheus" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866232 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="prometheus" Sep 30 00:28:38 crc kubenswrapper[4809]: E0930 00:28:38.866256 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="thanos-sidecar" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866261 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="thanos-sidecar" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866413 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="config-reloader" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866429 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="prometheus" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.866437 4809 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="5681f926-bb52-4288-b0cc-ca30e087457d" containerName="thanos-sidecar" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.867938 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.878058 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.879064 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.879293 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.879582 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-nv5s2" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.882749 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.883047 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.891398 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.947920 4809 scope.go:117] "RemoveContainer" containerID="9ee88241784fb7a6c4023ca1e41ac8f04bf174f3aa108256a133735b3824bd22" Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.950970 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-shpsr"] Sep 30 00:28:38 crc kubenswrapper[4809]: I0930 00:28:38.974906 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:38.999542 4809 scope.go:117] "RemoveContainer" containerID="6a708690ab6db92a5831a49baff934dfaea893e35040fc08dc056c48ebd3b150" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.019184 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.019426 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.019500 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbnr5\" (UniqueName: \"kubernetes.io/projected/33bc071b-ab55-4bea-a4b2-351c18e716e7-kube-api-access-fbnr5\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc 
kubenswrapper[4809]: I0930 00:28:39.019599 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/33bc071b-ab55-4bea-a4b2-351c18e716e7-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.019872 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-config\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.019970 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.020048 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.020116 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/33bc071b-ab55-4bea-a4b2-351c18e716e7-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.020191 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.020293 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/33bc071b-ab55-4bea-a4b2-351c18e716e7-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.020382 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.058954 4809 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/cinder-db-create-cfdjx"] Sep 30 00:28:39 crc kubenswrapper[4809]: W0930 00:28:39.099616 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f8f7eac_270c_47bc_bfbf_acd03e83bdf6.slice/crio-b44b0babedc04bbc5de411bc6b3d510dec25bb7510ea99d7dbade15d9b095e30 WatchSource:0}: Error finding container b44b0babedc04bbc5de411bc6b3d510dec25bb7510ea99d7dbade15d9b095e30: Status 404 returned error can't find the container with id b44b0babedc04bbc5de411bc6b3d510dec25bb7510ea99d7dbade15d9b095e30 Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.121769 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/33bc071b-ab55-4bea-a4b2-351c18e716e7-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.121816 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.121862 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.121895 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.121917 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbnr5\" (UniqueName: \"kubernetes.io/projected/33bc071b-ab55-4bea-a4b2-351c18e716e7-kube-api-access-fbnr5\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.121960 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/33bc071b-ab55-4bea-a4b2-351c18e716e7-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.122042 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-config\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.122071 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.122096 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.122118 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/33bc071b-ab55-4bea-a4b2-351c18e716e7-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.122134 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.129318 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/33bc071b-ab55-4bea-a4b2-351c18e716e7-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.131756 4809 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.131781 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/05ba422c963d7c6b869a3db801182b4e4c7098cadb23639abf6cf1f8f773ff1a/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.133155 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.133173 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.133560 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.133597 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-config\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.138612 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.140420 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/33bc071b-ab55-4bea-a4b2-351c18e716e7-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.140710 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/33bc071b-ab55-4bea-a4b2-351c18e716e7-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.147304 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/33bc071b-ab55-4bea-a4b2-351c18e716e7-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.188531 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c58cec56-19b8-4689-9986-ba07a43850e3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c58cec56-19b8-4689-9986-ba07a43850e3\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.204497 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbnr5\" (UniqueName: \"kubernetes.io/projected/33bc071b-ab55-4bea-a4b2-351c18e716e7-kube-api-access-fbnr5\") pod \"prometheus-metric-storage-0\" (UID: \"33bc071b-ab55-4bea-a4b2-351c18e716e7\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.233253 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.277272 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-d9jks"] Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.303958 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-sdjks"] Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.729038 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5681f926-bb52-4288-b0cc-ca30e087457d" path="/var/lib/kubelet/pods/5681f926-bb52-4288-b0cc-ca30e087457d/volumes" Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.764988 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hcb4b"] Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.783509 4809 generic.go:334] "Generic (PLEG): container finished" podID="d346f7a2-3667-4066-975e-9b834ddffcfd" containerID="70b1eb2878e69129416b39fe579c3dd63925765bed63d5c8e87523c3e9be342b" exitCode=0 Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.783589 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-sdjks" event={"ID":"d346f7a2-3667-4066-975e-9b834ddffcfd","Type":"ContainerDied","Data":"70b1eb2878e69129416b39fe579c3dd63925765bed63d5c8e87523c3e9be342b"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.783611 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-sdjks" event={"ID":"d346f7a2-3667-4066-975e-9b834ddffcfd","Type":"ContainerStarted","Data":"534660bc44455e2b9383df2f4680b31967c8e853bee32a1d352ece42834d4c4e"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.786128 4809 generic.go:334] "Generic (PLEG): container finished" podID="7f8f7eac-270c-47bc-bfbf-acd03e83bdf6" containerID="ec0fe56f1b96f69fbf3f827f6a34e8d54debc382f7ea8c3dd3e1cbb208fd5e1e" exitCode=0 Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.786171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cfdjx" event={"ID":"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6","Type":"ContainerDied","Data":"ec0fe56f1b96f69fbf3f827f6a34e8d54debc382f7ea8c3dd3e1cbb208fd5e1e"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.786209 4809 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/cinder-db-create-cfdjx" event={"ID":"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6","Type":"ContainerStarted","Data":"b44b0babedc04bbc5de411bc6b3d510dec25bb7510ea99d7dbade15d9b095e30"} Sep 30 00:28:39 crc kubenswrapper[4809]: W0930 00:28:39.789789 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60d6da9e_f759_4557_bc7a_9e73d53728a2.slice/crio-7ededa0725a4ce2d8fdfa0d0fd15300450a593a838e21f7aba75afe0a5ffc5da WatchSource:0}: Error finding container 7ededa0725a4ce2d8fdfa0d0fd15300450a593a838e21f7aba75afe0a5ffc5da: Status 404 returned error can't find the container with id 7ededa0725a4ce2d8fdfa0d0fd15300450a593a838e21f7aba75afe0a5ffc5da Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.790455 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerID="938f9567f9f9502227e3745b31bad23d2fd1ff8abda15964aced4de503c54298" exitCode=0 Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.790516 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" event={"ID":"ef1a2ce9-936b-4adf-b07a-5790b5915b3d","Type":"ContainerDied","Data":"938f9567f9f9502227e3745b31bad23d2fd1ff8abda15964aced4de503c54298"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.790542 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" event={"ID":"ef1a2ce9-936b-4adf-b07a-5790b5915b3d","Type":"ContainerStarted","Data":"f16a969aed27a804e4df2b773d7283c69a179ad1b75fee5f65a4132b11eb7cc5"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.808071 4809 generic.go:334] "Generic (PLEG): container finished" podID="8f3d710a-877b-4d2b-922a-83aba52d6e9c" containerID="631225e9da2ec062bc44ad4fbf1975da64f2e0c2375d40b684dd7299e2e38a41" exitCode=0 Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.808118 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-shpsr" event={"ID":"8f3d710a-877b-4d2b-922a-83aba52d6e9c","Type":"ContainerDied","Data":"631225e9da2ec062bc44ad4fbf1975da64f2e0c2375d40b684dd7299e2e38a41"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.808166 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-shpsr" event={"ID":"8f3d710a-877b-4d2b-922a-83aba52d6e9c","Type":"ContainerStarted","Data":"b61a40ca7dfacf35425478f83f1ca93765db54d6a7bd0a742b86ac9a715e6efe"} Sep 30 00:28:39 crc kubenswrapper[4809]: I0930 00:28:39.907721 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.816926 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"33bc071b-ab55-4bea-a4b2-351c18e716e7","Type":"ContainerStarted","Data":"17381587e3c65aaedf9f59ba338187482db3ba8f5fb15ae8e7c03c0efcaebab0"} Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.819821 4809 generic.go:334] "Generic (PLEG): container finished" podID="60d6da9e-f759-4557-bc7a-9e73d53728a2" containerID="2c0fce65679830ef0ff486d42dabedd6eaad41e4b2f4bc33f3c141b3046dceda" exitCode=0 Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.819878 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hcb4b" event={"ID":"60d6da9e-f759-4557-bc7a-9e73d53728a2","Type":"ContainerDied","Data":"2c0fce65679830ef0ff486d42dabedd6eaad41e4b2f4bc33f3c141b3046dceda"} 
Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.819898 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hcb4b" event={"ID":"60d6da9e-f759-4557-bc7a-9e73d53728a2","Type":"ContainerStarted","Data":"7ededa0725a4ce2d8fdfa0d0fd15300450a593a838e21f7aba75afe0a5ffc5da"} Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.821724 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" event={"ID":"ef1a2ce9-936b-4adf-b07a-5790b5915b3d","Type":"ContainerStarted","Data":"c0107e37ee0d442f160e3fd69f2a7f09be50715d85729d6b4303f2185a6fe709"} Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.928973 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" podStartSLOduration=2.928951246 podStartE2EDuration="2.928951246s" podCreationTimestamp="2025-09-30 00:28:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:40.868940029 +0000 UTC m=+1171.905189437" watchObservedRunningTime="2025-09-30 00:28:40.928951246 +0000 UTC m=+1171.965200654" Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.932554 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6fe4-account-create-g7zd6"] Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.933816 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.939795 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 00:28:40 crc kubenswrapper[4809]: I0930 00:28:40.952958 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6fe4-account-create-g7zd6"] Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.078651 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgt6l\" (UniqueName: \"kubernetes.io/projected/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4-kube-api-access-kgt6l\") pod \"keystone-6fe4-account-create-g7zd6\" (UID: \"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4\") " pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.163795 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.180421 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgt6l\" (UniqueName: \"kubernetes.io/projected/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4-kube-api-access-kgt6l\") pod \"keystone-6fe4-account-create-g7zd6\" (UID: \"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4\") " pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.218991 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgt6l\" (UniqueName: \"kubernetes.io/projected/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4-kube-api-access-kgt6l\") pod \"keystone-6fe4-account-create-g7zd6\" (UID: \"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4\") " pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.268227 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.280333 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-186e-account-create-lt9zr"] Sep 30 00:28:41 crc kubenswrapper[4809]: E0930 00:28:41.280809 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f3d710a-877b-4d2b-922a-83aba52d6e9c" containerName="mariadb-database-create" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.280826 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f3d710a-877b-4d2b-922a-83aba52d6e9c" containerName="mariadb-database-create" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.281042 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f3d710a-877b-4d2b-922a-83aba52d6e9c" containerName="mariadb-database-create" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.281778 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzsdz\" (UniqueName: \"kubernetes.io/projected/8f3d710a-877b-4d2b-922a-83aba52d6e9c-kube-api-access-kzsdz\") pod \"8f3d710a-877b-4d2b-922a-83aba52d6e9c\" (UID: \"8f3d710a-877b-4d2b-922a-83aba52d6e9c\") " Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.282071 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.286465 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.290386 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-186e-account-create-lt9zr"] Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.316857 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f3d710a-877b-4d2b-922a-83aba52d6e9c-kube-api-access-kzsdz" (OuterVolumeSpecName: "kube-api-access-kzsdz") pod "8f3d710a-877b-4d2b-922a-83aba52d6e9c" (UID: "8f3d710a-877b-4d2b-922a-83aba52d6e9c"). InnerVolumeSpecName "kube-api-access-kzsdz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.384824 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx6ld\" (UniqueName: \"kubernetes.io/projected/22bc004f-362d-45a9-9643-b308efabecdf-kube-api-access-vx6ld\") pod \"placement-186e-account-create-lt9zr\" (UID: \"22bc004f-362d-45a9-9643-b308efabecdf\") " pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.385251 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzsdz\" (UniqueName: \"kubernetes.io/projected/8f3d710a-877b-4d2b-922a-83aba52d6e9c-kube-api-access-kzsdz\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.485829 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-bd76-account-create-bfxh6"] Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.486349 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx6ld\" (UniqueName: \"kubernetes.io/projected/22bc004f-362d-45a9-9643-b308efabecdf-kube-api-access-vx6ld\") pod \"placement-186e-account-create-lt9zr\" (UID: \"22bc004f-362d-45a9-9643-b308efabecdf\") " pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.491867 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.493604 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.494020 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-sdjks" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.562214 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bd76-account-create-bfxh6"] Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.592228 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4j6dn\" (UniqueName: \"kubernetes.io/projected/d346f7a2-3667-4066-975e-9b834ddffcfd-kube-api-access-4j6dn\") pod \"d346f7a2-3667-4066-975e-9b834ddffcfd\" (UID: \"d346f7a2-3667-4066-975e-9b834ddffcfd\") " Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.592877 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9slkw\" (UniqueName: \"kubernetes.io/projected/fd07d831-7a41-45a6-a4a5-abd8b40528d4-kube-api-access-9slkw\") pod \"glance-bd76-account-create-bfxh6\" (UID: \"fd07d831-7a41-45a6-a4a5-abd8b40528d4\") " pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.599940 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.603388 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx6ld\" (UniqueName: \"kubernetes.io/projected/22bc004f-362d-45a9-9643-b308efabecdf-kube-api-access-vx6ld\") pod \"placement-186e-account-create-lt9zr\" (UID: \"22bc004f-362d-45a9-9643-b308efabecdf\") " pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.611458 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d346f7a2-3667-4066-975e-9b834ddffcfd-kube-api-access-4j6dn" (OuterVolumeSpecName: "kube-api-access-4j6dn") pod "d346f7a2-3667-4066-975e-9b834ddffcfd" (UID: "d346f7a2-3667-4066-975e-9b834ddffcfd"). InnerVolumeSpecName "kube-api-access-4j6dn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.695705 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfvfk\" (UniqueName: \"kubernetes.io/projected/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6-kube-api-access-mfvfk\") pod \"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6\" (UID: \"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6\") " Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.704856 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6-kube-api-access-mfvfk" (OuterVolumeSpecName: "kube-api-access-mfvfk") pod "7f8f7eac-270c-47bc-bfbf-acd03e83bdf6" (UID: "7f8f7eac-270c-47bc-bfbf-acd03e83bdf6"). InnerVolumeSpecName "kube-api-access-mfvfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.712783 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9slkw\" (UniqueName: \"kubernetes.io/projected/fd07d831-7a41-45a6-a4a5-abd8b40528d4-kube-api-access-9slkw\") pod \"glance-bd76-account-create-bfxh6\" (UID: \"fd07d831-7a41-45a6-a4a5-abd8b40528d4\") " pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.722093 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfvfk\" (UniqueName: \"kubernetes.io/projected/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6-kube-api-access-mfvfk\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.722193 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4j6dn\" (UniqueName: \"kubernetes.io/projected/d346f7a2-3667-4066-975e-9b834ddffcfd-kube-api-access-4j6dn\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.746038 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9slkw\" (UniqueName: \"kubernetes.io/projected/fd07d831-7a41-45a6-a4a5-abd8b40528d4-kube-api-access-9slkw\") pod \"glance-bd76-account-create-bfxh6\" (UID: \"fd07d831-7a41-45a6-a4a5-abd8b40528d4\") " pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.791085 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.850343 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-sdjks" event={"ID":"d346f7a2-3667-4066-975e-9b834ddffcfd","Type":"ContainerDied","Data":"534660bc44455e2b9383df2f4680b31967c8e853bee32a1d352ece42834d4c4e"} Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.850381 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="534660bc44455e2b9383df2f4680b31967c8e853bee32a1d352ece42834d4c4e" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.850432 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-sdjks" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.855265 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cfdjx" event={"ID":"7f8f7eac-270c-47bc-bfbf-acd03e83bdf6","Type":"ContainerDied","Data":"b44b0babedc04bbc5de411bc6b3d510dec25bb7510ea99d7dbade15d9b095e30"} Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.855306 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b44b0babedc04bbc5de411bc6b3d510dec25bb7510ea99d7dbade15d9b095e30" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.855679 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cfdjx" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.861801 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-shpsr" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.862997 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-shpsr" event={"ID":"8f3d710a-877b-4d2b-922a-83aba52d6e9c","Type":"ContainerDied","Data":"b61a40ca7dfacf35425478f83f1ca93765db54d6a7bd0a742b86ac9a715e6efe"} Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.863028 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b61a40ca7dfacf35425478f83f1ca93765db54d6a7bd0a742b86ac9a715e6efe" Sep 30 00:28:41 crc kubenswrapper[4809]: I0930 00:28:41.863719 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.002125 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.092189 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6fe4-account-create-g7zd6"] Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.253060 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-186e-account-create-lt9zr"] Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.499386 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bd76-account-create-bfxh6"] Sep 30 00:28:42 crc kubenswrapper[4809]: W0930 00:28:42.508908 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd07d831_7a41_45a6_a4a5_abd8b40528d4.slice/crio-4310b79c50b9883794bdd8b44504550f47ed0e8afbdf367beda5754ee166667c WatchSource:0}: Error finding container 4310b79c50b9883794bdd8b44504550f47ed0e8afbdf367beda5754ee166667c: Status 404 returned error can't find the container with id 4310b79c50b9883794bdd8b44504550f47ed0e8afbdf367beda5754ee166667c Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.823666 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.876480 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bd76-account-create-bfxh6" event={"ID":"fd07d831-7a41-45a6-a4a5-abd8b40528d4","Type":"ContainerStarted","Data":"4310b79c50b9883794bdd8b44504550f47ed0e8afbdf367beda5754ee166667c"} Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.878678 4809 generic.go:334] "Generic (PLEG): container finished" podID="d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4" containerID="a9b9df5168f52c550629419bd8bccb7fe9c40af7e04940ddf6a1d377bed54178" exitCode=0 Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.878719 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fe4-account-create-g7zd6" event={"ID":"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4","Type":"ContainerDied","Data":"a9b9df5168f52c550629419bd8bccb7fe9c40af7e04940ddf6a1d377bed54178"} Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.878740 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fe4-account-create-g7zd6" event={"ID":"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4","Type":"ContainerStarted","Data":"9ded5d97e57d16385406e44b99fc4e301fcbaab08cfd17770a9b8fd16f322fdb"} Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.887949 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hcb4b" event={"ID":"60d6da9e-f759-4557-bc7a-9e73d53728a2","Type":"ContainerDied","Data":"7ededa0725a4ce2d8fdfa0d0fd15300450a593a838e21f7aba75afe0a5ffc5da"} Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.887989 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ededa0725a4ce2d8fdfa0d0fd15300450a593a838e21f7aba75afe0a5ffc5da" Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.888156 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-hcb4b" Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.892636 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-186e-account-create-lt9zr" event={"ID":"22bc004f-362d-45a9-9643-b308efabecdf","Type":"ContainerStarted","Data":"08a3323b5642c9b71f052f0296ac6367f5cb74fd74393bbb748a67601ed6aff7"} Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.894549 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"33bc071b-ab55-4bea-a4b2-351c18e716e7","Type":"ContainerStarted","Data":"2c9aa7cc12bfc841a337cfc71a1f3e760d4059af076e2895731338412cdc1f5d"} Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.956599 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6xjj\" (UniqueName: \"kubernetes.io/projected/60d6da9e-f759-4557-bc7a-9e73d53728a2-kube-api-access-l6xjj\") pod \"60d6da9e-f759-4557-bc7a-9e73d53728a2\" (UID: \"60d6da9e-f759-4557-bc7a-9e73d53728a2\") " Sep 30 00:28:42 crc kubenswrapper[4809]: I0930 00:28:42.962165 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60d6da9e-f759-4557-bc7a-9e73d53728a2-kube-api-access-l6xjj" (OuterVolumeSpecName: "kube-api-access-l6xjj") pod "60d6da9e-f759-4557-bc7a-9e73d53728a2" (UID: "60d6da9e-f759-4557-bc7a-9e73d53728a2"). InnerVolumeSpecName "kube-api-access-l6xjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.059418 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6xjj\" (UniqueName: \"kubernetes.io/projected/60d6da9e-f759-4557-bc7a-9e73d53728a2-kube-api-access-l6xjj\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.576061 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-605e-account-create-27tv4"] Sep 30 00:28:43 crc kubenswrapper[4809]: E0930 00:28:43.576824 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d6da9e-f759-4557-bc7a-9e73d53728a2" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.576845 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d6da9e-f759-4557-bc7a-9e73d53728a2" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: E0930 00:28:43.576872 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d346f7a2-3667-4066-975e-9b834ddffcfd" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.576882 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d346f7a2-3667-4066-975e-9b834ddffcfd" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: E0930 00:28:43.576914 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8f7eac-270c-47bc-bfbf-acd03e83bdf6" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.576923 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8f7eac-270c-47bc-bfbf-acd03e83bdf6" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.577142 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8f7eac-270c-47bc-bfbf-acd03e83bdf6" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.577157 4809 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="60d6da9e-f759-4557-bc7a-9e73d53728a2" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.577180 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d346f7a2-3667-4066-975e-9b834ddffcfd" containerName="mariadb-database-create" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.581936 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.585333 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-605e-account-create-27tv4"] Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.587148 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-openstack-cell1-db-secret" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.771937 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgsb6\" (UniqueName: \"kubernetes.io/projected/94631e9e-696e-4dcb-b06e-386d66459e6f-kube-api-access-vgsb6\") pod \"mysqld-exporter-605e-account-create-27tv4\" (UID: \"94631e9e-696e-4dcb-b06e-386d66459e6f\") " pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.875988 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgsb6\" (UniqueName: \"kubernetes.io/projected/94631e9e-696e-4dcb-b06e-386d66459e6f-kube-api-access-vgsb6\") pod \"mysqld-exporter-605e-account-create-27tv4\" (UID: \"94631e9e-696e-4dcb-b06e-386d66459e6f\") " pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.900875 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgsb6\" (UniqueName: \"kubernetes.io/projected/94631e9e-696e-4dcb-b06e-386d66459e6f-kube-api-access-vgsb6\") pod \"mysqld-exporter-605e-account-create-27tv4\" (UID: \"94631e9e-696e-4dcb-b06e-386d66459e6f\") " pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.911186 4809 generic.go:334] "Generic (PLEG): container finished" podID="22bc004f-362d-45a9-9643-b308efabecdf" containerID="946b7d706704df502abc7ba17547fd033dd5262b5588dfbd2a1e1eb3a71cd424" exitCode=0 Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.911323 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-186e-account-create-lt9zr" event={"ID":"22bc004f-362d-45a9-9643-b308efabecdf","Type":"ContainerDied","Data":"946b7d706704df502abc7ba17547fd033dd5262b5588dfbd2a1e1eb3a71cd424"} Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.913910 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.914285 4809 generic.go:334] "Generic (PLEG): container finished" podID="fd07d831-7a41-45a6-a4a5-abd8b40528d4" containerID="47124f3ee9666dc743709b2ae189206e5cdf8cdffcc0ec8f9dbcbb7d9e3866c5" exitCode=0 Sep 30 00:28:43 crc kubenswrapper[4809]: I0930 00:28:43.914412 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bd76-account-create-bfxh6" event={"ID":"fd07d831-7a41-45a6-a4a5-abd8b40528d4","Type":"ContainerDied","Data":"47124f3ee9666dc743709b2ae189206e5cdf8cdffcc0ec8f9dbcbb7d9e3866c5"} Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.294596 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.420945 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-605e-account-create-27tv4"] Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.486103 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgt6l\" (UniqueName: \"kubernetes.io/projected/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4-kube-api-access-kgt6l\") pod \"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4\" (UID: \"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4\") " Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.491250 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4-kube-api-access-kgt6l" (OuterVolumeSpecName: "kube-api-access-kgt6l") pod "d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4" (UID: "d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4"). InnerVolumeSpecName "kube-api-access-kgt6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.588539 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgt6l\" (UniqueName: \"kubernetes.io/projected/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4-kube-api-access-kgt6l\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.923369 4809 generic.go:334] "Generic (PLEG): container finished" podID="94631e9e-696e-4dcb-b06e-386d66459e6f" containerID="1cc8266f7afe5c3cb62635feaa024dd9bb9c72918aafa1e92e52dd4ff52d7b69" exitCode=0 Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.923412 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-605e-account-create-27tv4" event={"ID":"94631e9e-696e-4dcb-b06e-386d66459e6f","Type":"ContainerDied","Data":"1cc8266f7afe5c3cb62635feaa024dd9bb9c72918aafa1e92e52dd4ff52d7b69"} Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.923454 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-605e-account-create-27tv4" event={"ID":"94631e9e-696e-4dcb-b06e-386d66459e6f","Type":"ContainerStarted","Data":"f2fc943c48e4a91eccfecce22f07ca385cbdc8fff387c243e395c474fc8b8944"} Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.924875 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fe4-account-create-g7zd6" event={"ID":"d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4","Type":"ContainerDied","Data":"9ded5d97e57d16385406e44b99fc4e301fcbaab08cfd17770a9b8fd16f322fdb"} Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.924904 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6fe4-account-create-g7zd6" Sep 30 00:28:44 crc kubenswrapper[4809]: I0930 00:28:44.924917 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ded5d97e57d16385406e44b99fc4e301fcbaab08cfd17770a9b8fd16f322fdb" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.231650 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.350739 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.403656 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx6ld\" (UniqueName: \"kubernetes.io/projected/22bc004f-362d-45a9-9643-b308efabecdf-kube-api-access-vx6ld\") pod \"22bc004f-362d-45a9-9643-b308efabecdf\" (UID: \"22bc004f-362d-45a9-9643-b308efabecdf\") " Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.413117 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22bc004f-362d-45a9-9643-b308efabecdf-kube-api-access-vx6ld" (OuterVolumeSpecName: "kube-api-access-vx6ld") pod "22bc004f-362d-45a9-9643-b308efabecdf" (UID: "22bc004f-362d-45a9-9643-b308efabecdf"). InnerVolumeSpecName "kube-api-access-vx6ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.505693 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9slkw\" (UniqueName: \"kubernetes.io/projected/fd07d831-7a41-45a6-a4a5-abd8b40528d4-kube-api-access-9slkw\") pod \"fd07d831-7a41-45a6-a4a5-abd8b40528d4\" (UID: \"fd07d831-7a41-45a6-a4a5-abd8b40528d4\") " Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.506235 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx6ld\" (UniqueName: \"kubernetes.io/projected/22bc004f-362d-45a9-9643-b308efabecdf-kube-api-access-vx6ld\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.509493 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd07d831-7a41-45a6-a4a5-abd8b40528d4-kube-api-access-9slkw" (OuterVolumeSpecName: "kube-api-access-9slkw") pod "fd07d831-7a41-45a6-a4a5-abd8b40528d4" (UID: "fd07d831-7a41-45a6-a4a5-abd8b40528d4"). InnerVolumeSpecName "kube-api-access-9slkw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.607326 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9slkw\" (UniqueName: \"kubernetes.io/projected/fd07d831-7a41-45a6-a4a5-abd8b40528d4-kube-api-access-9slkw\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.943029 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bd76-account-create-bfxh6" event={"ID":"fd07d831-7a41-45a6-a4a5-abd8b40528d4","Type":"ContainerDied","Data":"4310b79c50b9883794bdd8b44504550f47ed0e8afbdf367beda5754ee166667c"} Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.943078 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4310b79c50b9883794bdd8b44504550f47ed0e8afbdf367beda5754ee166667c" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.943157 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bd76-account-create-bfxh6" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.947308 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-186e-account-create-lt9zr" event={"ID":"22bc004f-362d-45a9-9643-b308efabecdf","Type":"ContainerDied","Data":"08a3323b5642c9b71f052f0296ac6367f5cb74fd74393bbb748a67601ed6aff7"} Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.947353 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08a3323b5642c9b71f052f0296ac6367f5cb74fd74393bbb748a67601ed6aff7" Sep 30 00:28:45 crc kubenswrapper[4809]: I0930 00:28:45.947654 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-186e-account-create-lt9zr" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.374445 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.540423 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgsb6\" (UniqueName: \"kubernetes.io/projected/94631e9e-696e-4dcb-b06e-386d66459e6f-kube-api-access-vgsb6\") pod \"94631e9e-696e-4dcb-b06e-386d66459e6f\" (UID: \"94631e9e-696e-4dcb-b06e-386d66459e6f\") " Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.545071 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94631e9e-696e-4dcb-b06e-386d66459e6f-kube-api-access-vgsb6" (OuterVolumeSpecName: "kube-api-access-vgsb6") pod "94631e9e-696e-4dcb-b06e-386d66459e6f" (UID: "94631e9e-696e-4dcb-b06e-386d66459e6f"). InnerVolumeSpecName "kube-api-access-vgsb6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597100 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-j2xxj"] Sep 30 00:28:46 crc kubenswrapper[4809]: E0930 00:28:46.597477 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22bc004f-362d-45a9-9643-b308efabecdf" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597501 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="22bc004f-362d-45a9-9643-b308efabecdf" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: E0930 00:28:46.597524 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd07d831-7a41-45a6-a4a5-abd8b40528d4" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597532 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd07d831-7a41-45a6-a4a5-abd8b40528d4" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: E0930 00:28:46.597552 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597560 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: E0930 00:28:46.597590 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94631e9e-696e-4dcb-b06e-386d66459e6f" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597598 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="94631e9e-696e-4dcb-b06e-386d66459e6f" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597828 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="94631e9e-696e-4dcb-b06e-386d66459e6f" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597844 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd07d831-7a41-45a6-a4a5-abd8b40528d4" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597857 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="22bc004f-362d-45a9-9643-b308efabecdf" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.597869 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4" containerName="mariadb-account-create" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.598633 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.601235 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.601398 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.601570 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8vz6r" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.602918 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.607905 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-j2xxj"] Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.642791 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-config-data\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.643007 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-combined-ca-bundle\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.643120 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpjfl\" (UniqueName: \"kubernetes.io/projected/e71457ea-c5a1-4a8b-8524-68181a838ffd-kube-api-access-fpjfl\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.643312 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgsb6\" (UniqueName: \"kubernetes.io/projected/94631e9e-696e-4dcb-b06e-386d66459e6f-kube-api-access-vgsb6\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.695408 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-lnq6w"] Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.696493 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.698274 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.698487 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dxw52" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.711552 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lnq6w"] Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744614 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-combined-ca-bundle\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744719 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-config-data\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744747 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-config-data\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744773 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-combined-ca-bundle\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744814 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpjfl\" (UniqueName: \"kubernetes.io/projected/e71457ea-c5a1-4a8b-8524-68181a838ffd-kube-api-access-fpjfl\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744836 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-db-sync-config-data\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.744859 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47xf5\" (UniqueName: \"kubernetes.io/projected/e3c04595-d3a8-4477-9daf-46b43a8750dd-kube-api-access-47xf5\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.757285 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-config-data\") pod \"keystone-db-sync-j2xxj\" 
(UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.760246 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-combined-ca-bundle\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.760916 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpjfl\" (UniqueName: \"kubernetes.io/projected/e71457ea-c5a1-4a8b-8524-68181a838ffd-kube-api-access-fpjfl\") pod \"keystone-db-sync-j2xxj\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.846337 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47xf5\" (UniqueName: \"kubernetes.io/projected/e3c04595-d3a8-4477-9daf-46b43a8750dd-kube-api-access-47xf5\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.846757 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-combined-ca-bundle\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.846978 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-config-data\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.847152 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-db-sync-config-data\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.855864 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-db-sync-config-data\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.856340 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-config-data\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.858382 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-combined-ca-bundle\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.862069 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-47xf5\" (UniqueName: \"kubernetes.io/projected/e3c04595-d3a8-4477-9daf-46b43a8750dd-kube-api-access-47xf5\") pod \"glance-db-sync-lnq6w\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.916741 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.961363 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-605e-account-create-27tv4" event={"ID":"94631e9e-696e-4dcb-b06e-386d66459e6f","Type":"ContainerDied","Data":"f2fc943c48e4a91eccfecce22f07ca385cbdc8fff387c243e395c474fc8b8944"} Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.961400 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2fc943c48e4a91eccfecce22f07ca385cbdc8fff387c243e395c474fc8b8944" Sep 30 00:28:46 crc kubenswrapper[4809]: I0930 00:28:46.961449 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-605e-account-create-27tv4" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.013697 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lnq6w" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.595914 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-j2xxj"] Sep 30 00:28:47 crc kubenswrapper[4809]: W0930 00:28:47.599040 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode71457ea_c5a1_4a8b_8524_68181a838ffd.slice/crio-ea13d64d565db160566e7f4f3ea92b015205fa2fbee0807e0d687630693d928a WatchSource:0}: Error finding container ea13d64d565db160566e7f4f3ea92b015205fa2fbee0807e0d687630693d928a: Status 404 returned error can't find the container with id ea13d64d565db160566e7f4f3ea92b015205fa2fbee0807e0d687630693d928a Sep 30 00:28:47 crc kubenswrapper[4809]: W0930 00:28:47.613569 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3c04595_d3a8_4477_9daf_46b43a8750dd.slice/crio-84f6a3e93ce5a9853700b32df5dba96f16a2ab9a37917e887c738523daa19d79 WatchSource:0}: Error finding container 84f6a3e93ce5a9853700b32df5dba96f16a2ab9a37917e887c738523daa19d79: Status 404 returned error can't find the container with id 84f6a3e93ce5a9853700b32df5dba96f16a2ab9a37917e887c738523daa19d79 Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.615603 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lnq6w"] Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.823749 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-c98f-account-create-b2zgz"] Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.825032 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.826830 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.833144 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c98f-account-create-b2zgz"] Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.920358 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-491f-account-create-fwgpd"] Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.922258 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.925811 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.933320 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-491f-account-create-fwgpd"] Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.963317 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fppg\" (UniqueName: \"kubernetes.io/projected/eb21eb26-8a42-4a7c-ace5-392c2284e0e5-kube-api-access-4fppg\") pod \"cinder-c98f-account-create-b2zgz\" (UID: \"eb21eb26-8a42-4a7c-ace5-392c2284e0e5\") " pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.971520 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lnq6w" event={"ID":"e3c04595-d3a8-4477-9daf-46b43a8750dd","Type":"ContainerStarted","Data":"84f6a3e93ce5a9853700b32df5dba96f16a2ab9a37917e887c738523daa19d79"} Sep 30 00:28:47 crc kubenswrapper[4809]: I0930 00:28:47.972903 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j2xxj" event={"ID":"e71457ea-c5a1-4a8b-8524-68181a838ffd","Type":"ContainerStarted","Data":"ea13d64d565db160566e7f4f3ea92b015205fa2fbee0807e0d687630693d928a"} Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.065434 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fppg\" (UniqueName: \"kubernetes.io/projected/eb21eb26-8a42-4a7c-ace5-392c2284e0e5-kube-api-access-4fppg\") pod \"cinder-c98f-account-create-b2zgz\" (UID: \"eb21eb26-8a42-4a7c-ace5-392c2284e0e5\") " pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.066221 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w66nr\" (UniqueName: \"kubernetes.io/projected/70c3a309-0bd5-4672-bfeb-876af5a894ca-kube-api-access-w66nr\") pod \"barbican-491f-account-create-fwgpd\" (UID: \"70c3a309-0bd5-4672-bfeb-876af5a894ca\") " pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.089434 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fppg\" (UniqueName: \"kubernetes.io/projected/eb21eb26-8a42-4a7c-ace5-392c2284e0e5-kube-api-access-4fppg\") pod \"cinder-c98f-account-create-b2zgz\" (UID: \"eb21eb26-8a42-4a7c-ace5-392c2284e0e5\") " pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.141443 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.150379 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-f22f-account-create-ctshj"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.152818 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.159396 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.164963 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-f22f-account-create-ctshj"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.167867 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w66nr\" (UniqueName: \"kubernetes.io/projected/70c3a309-0bd5-4672-bfeb-876af5a894ca-kube-api-access-w66nr\") pod \"barbican-491f-account-create-fwgpd\" (UID: \"70c3a309-0bd5-4672-bfeb-876af5a894ca\") " pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.190421 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w66nr\" (UniqueName: \"kubernetes.io/projected/70c3a309-0bd5-4672-bfeb-876af5a894ca-kube-api-access-w66nr\") pod \"barbican-491f-account-create-fwgpd\" (UID: \"70c3a309-0bd5-4672-bfeb-876af5a894ca\") " pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.259951 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.269849 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj2c4\" (UniqueName: \"kubernetes.io/projected/54c601fe-b3c3-40ee-bf5d-6f15b4535a22-kube-api-access-rj2c4\") pod \"heat-f22f-account-create-ctshj\" (UID: \"54c601fe-b3c3-40ee-bf5d-6f15b4535a22\") " pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.372374 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj2c4\" (UniqueName: \"kubernetes.io/projected/54c601fe-b3c3-40ee-bf5d-6f15b4535a22-kube-api-access-rj2c4\") pod \"heat-f22f-account-create-ctshj\" (UID: \"54c601fe-b3c3-40ee-bf5d-6f15b4535a22\") " pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.396457 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj2c4\" (UniqueName: \"kubernetes.io/projected/54c601fe-b3c3-40ee-bf5d-6f15b4535a22-kube-api-access-rj2c4\") pod \"heat-f22f-account-create-ctshj\" (UID: \"54c601fe-b3c3-40ee-bf5d-6f15b4535a22\") " pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.553297 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:48 crc kubenswrapper[4809]: W0930 00:28:48.612986 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb21eb26_8a42_4a7c_ace5_392c2284e0e5.slice/crio-b3fea8b9cab70f837d2d2dbfb8067983f38cbe15918117aec06538a7c1838f70 WatchSource:0}: Error finding container b3fea8b9cab70f837d2d2dbfb8067983f38cbe15918117aec06538a7c1838f70: Status 404 returned error can't find the container with id b3fea8b9cab70f837d2d2dbfb8067983f38cbe15918117aec06538a7c1838f70 Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.613372 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c98f-account-create-b2zgz"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.687030 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.735398 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.760495 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-0" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.765246 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-config-data" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.793911 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.833930 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-491f-account-create-fwgpd"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.864391 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-gj5gk"] Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.864574 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-gj5gk" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="dnsmasq-dns" containerID="cri-o://a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb" gracePeriod=10 Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.891300 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.891385 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-config-data\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.891477 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwlnm\" (UniqueName: \"kubernetes.io/projected/1097b4e8-b93a-4350-b74c-acf37be3a84f-kube-api-access-rwlnm\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.998612 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:48 crc kubenswrapper[4809]: I0930 00:28:48.998682 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-config-data\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:48.998731 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwlnm\" (UniqueName: \"kubernetes.io/projected/1097b4e8-b93a-4350-b74c-acf37be3a84f-kube-api-access-rwlnm\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.019366 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-config-data\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.020501 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwlnm\" (UniqueName: \"kubernetes.io/projected/1097b4e8-b93a-4350-b74c-acf37be3a84f-kube-api-access-rwlnm\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.025015 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " pod="openstack/mysqld-exporter-0" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.039968 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-gj5gk" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.136:5353: connect: connection refused" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.041362 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-491f-account-create-fwgpd" event={"ID":"70c3a309-0bd5-4672-bfeb-876af5a894ca","Type":"ContainerStarted","Data":"edf84148e7f48a352af75acba6195308720d30750b105ef224cc386cd2427ac5"} Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.058834 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c98f-account-create-b2zgz" event={"ID":"eb21eb26-8a42-4a7c-ace5-392c2284e0e5","Type":"ContainerStarted","Data":"daf647f2ec2c8eabfa7ad9847e1fd9cd610f40dd7afd9c20ec38c946d0659c1a"} Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.058878 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c98f-account-create-b2zgz" event={"ID":"eb21eb26-8a42-4a7c-ace5-392c2284e0e5","Type":"ContainerStarted","Data":"b3fea8b9cab70f837d2d2dbfb8067983f38cbe15918117aec06538a7c1838f70"} Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.114070 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/heat-f22f-account-create-ctshj"] Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.121658 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-c98f-account-create-b2zgz" podStartSLOduration=2.121620356 podStartE2EDuration="2.121620356s" podCreationTimestamp="2025-09-30 00:28:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:49.103739355 +0000 UTC m=+1180.139988763" watchObservedRunningTime="2025-09-30 00:28:49.121620356 +0000 UTC m=+1180.157869764" Sep 30 00:28:49 crc kubenswrapper[4809]: W0930 00:28:49.127615 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54c601fe_b3c3_40ee_bf5d_6f15b4535a22.slice/crio-e0786736951cc5acb09b4fe4ac29978cd1a903f6236bea5b56a9e17970e6d4c8 WatchSource:0}: Error finding container e0786736951cc5acb09b4fe4ac29978cd1a903f6236bea5b56a9e17970e6d4c8: Status 404 returned error can't find the container with id e0786736951cc5acb09b4fe4ac29978cd1a903f6236bea5b56a9e17970e6d4c8 Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.133286 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-0" Sep 30 00:28:49 crc kubenswrapper[4809]: I0930 00:28:49.735569 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:28:49 crc kubenswrapper[4809]: W0930 00:28:49.761907 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1097b4e8_b93a_4350_b74c_acf37be3a84f.slice/crio-b89c46e0a9cc32018b1c2a520287924a72e0e1d7b56d68b45ef88ebdaa74a2c3 WatchSource:0}: Error finding container b89c46e0a9cc32018b1c2a520287924a72e0e1d7b56d68b45ef88ebdaa74a2c3: Status 404 returned error can't find the container with id b89c46e0a9cc32018b1c2a520287924a72e0e1d7b56d68b45ef88ebdaa74a2c3 Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.069597 4809 generic.go:334] "Generic (PLEG): container finished" podID="70c3a309-0bd5-4672-bfeb-876af5a894ca" containerID="071c09296afcb6d02c440174eac027529130993f644e202748d64549a9f113fa" exitCode=0 Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.069923 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-491f-account-create-fwgpd" event={"ID":"70c3a309-0bd5-4672-bfeb-876af5a894ca","Type":"ContainerDied","Data":"071c09296afcb6d02c440174eac027529130993f644e202748d64549a9f113fa"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.072548 4809 generic.go:334] "Generic (PLEG): container finished" podID="eb21eb26-8a42-4a7c-ace5-392c2284e0e5" containerID="daf647f2ec2c8eabfa7ad9847e1fd9cd610f40dd7afd9c20ec38c946d0659c1a" exitCode=0 Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.072617 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c98f-account-create-b2zgz" event={"ID":"eb21eb26-8a42-4a7c-ace5-392c2284e0e5","Type":"ContainerDied","Data":"daf647f2ec2c8eabfa7ad9847e1fd9cd610f40dd7afd9c20ec38c946d0659c1a"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.073715 4809 generic.go:334] "Generic (PLEG): container finished" podID="33bc071b-ab55-4bea-a4b2-351c18e716e7" containerID="2c9aa7cc12bfc841a337cfc71a1f3e760d4059af076e2895731338412cdc1f5d" exitCode=0 Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.073761 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/prometheus-metric-storage-0" event={"ID":"33bc071b-ab55-4bea-a4b2-351c18e716e7","Type":"ContainerDied","Data":"2c9aa7cc12bfc841a337cfc71a1f3e760d4059af076e2895731338412cdc1f5d"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.076545 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"1097b4e8-b93a-4350-b74c-acf37be3a84f","Type":"ContainerStarted","Data":"b89c46e0a9cc32018b1c2a520287924a72e0e1d7b56d68b45ef88ebdaa74a2c3"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.079279 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.081430 4809 generic.go:334] "Generic (PLEG): container finished" podID="54c601fe-b3c3-40ee-bf5d-6f15b4535a22" containerID="4237027638fb1fefdd8550cadce45021c733f8de87bf6aa518a79e25262558a9" exitCode=0 Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.081503 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-f22f-account-create-ctshj" event={"ID":"54c601fe-b3c3-40ee-bf5d-6f15b4535a22","Type":"ContainerDied","Data":"4237027638fb1fefdd8550cadce45021c733f8de87bf6aa518a79e25262558a9"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.081530 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-f22f-account-create-ctshj" event={"ID":"54c601fe-b3c3-40ee-bf5d-6f15b4535a22","Type":"ContainerStarted","Data":"e0786736951cc5acb09b4fe4ac29978cd1a903f6236bea5b56a9e17970e6d4c8"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.086676 4809 generic.go:334] "Generic (PLEG): container finished" podID="25170433-57de-43a1-8459-8c2e31c42ffb" containerID="a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb" exitCode=0 Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.086730 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-gj5gk" event={"ID":"25170433-57de-43a1-8459-8c2e31c42ffb","Type":"ContainerDied","Data":"a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.086758 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-gj5gk" event={"ID":"25170433-57de-43a1-8459-8c2e31c42ffb","Type":"ContainerDied","Data":"460850622b0a24aa127c0e2c161dc03842db225f3e04c1ac63bb58dfe892ef75"} Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.086781 4809 scope.go:117] "RemoveContainer" containerID="a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.086912 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-gj5gk" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.216347 4809 scope.go:117] "RemoveContainer" containerID="336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.230279 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-498v7\" (UniqueName: \"kubernetes.io/projected/25170433-57de-43a1-8459-8c2e31c42ffb-kube-api-access-498v7\") pod \"25170433-57de-43a1-8459-8c2e31c42ffb\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.230337 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-config\") pod \"25170433-57de-43a1-8459-8c2e31c42ffb\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.230380 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-nb\") pod \"25170433-57de-43a1-8459-8c2e31c42ffb\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.230447 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-dns-svc\") pod \"25170433-57de-43a1-8459-8c2e31c42ffb\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.230503 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-sb\") pod \"25170433-57de-43a1-8459-8c2e31c42ffb\" (UID: \"25170433-57de-43a1-8459-8c2e31c42ffb\") " Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.249520 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25170433-57de-43a1-8459-8c2e31c42ffb-kube-api-access-498v7" (OuterVolumeSpecName: "kube-api-access-498v7") pod "25170433-57de-43a1-8459-8c2e31c42ffb" (UID: "25170433-57de-43a1-8459-8c2e31c42ffb"). InnerVolumeSpecName "kube-api-access-498v7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.273405 4809 scope.go:117] "RemoveContainer" containerID="a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb" Sep 30 00:28:50 crc kubenswrapper[4809]: E0930 00:28:50.274611 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb\": container with ID starting with a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb not found: ID does not exist" containerID="a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.274655 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb"} err="failed to get container status \"a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb\": rpc error: code = NotFound desc = could not find container \"a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb\": container with ID starting with a1a656863eb1d851d27c31fec10297b8a907f45be6450a97c40e34ba174e61fb not found: ID does not exist" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.274676 4809 scope.go:117] "RemoveContainer" containerID="336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6" Sep 30 00:28:50 crc kubenswrapper[4809]: E0930 00:28:50.276599 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6\": container with ID starting with 336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6 not found: ID does not exist" containerID="336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.276661 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6"} err="failed to get container status \"336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6\": rpc error: code = NotFound desc = could not find container \"336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6\": container with ID starting with 336f954ee25cc9b309f23dd7eb6afff96ea5ddf0ad681113d006924c02f0c5f6 not found: ID does not exist" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.333923 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-498v7\" (UniqueName: \"kubernetes.io/projected/25170433-57de-43a1-8459-8c2e31c42ffb-kube-api-access-498v7\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.370048 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-config" (OuterVolumeSpecName: "config") pod "25170433-57de-43a1-8459-8c2e31c42ffb" (UID: "25170433-57de-43a1-8459-8c2e31c42ffb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.370077 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "25170433-57de-43a1-8459-8c2e31c42ffb" (UID: "25170433-57de-43a1-8459-8c2e31c42ffb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.392890 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "25170433-57de-43a1-8459-8c2e31c42ffb" (UID: "25170433-57de-43a1-8459-8c2e31c42ffb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.407359 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "25170433-57de-43a1-8459-8c2e31c42ffb" (UID: "25170433-57de-43a1-8459-8c2e31c42ffb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.436408 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.436440 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.436451 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.436460 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25170433-57de-43a1-8459-8c2e31c42ffb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.729078 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-gj5gk"] Sep 30 00:28:50 crc kubenswrapper[4809]: I0930 00:28:50.738474 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-gj5gk"] Sep 30 00:28:51 crc kubenswrapper[4809]: I0930 00:28:51.100578 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"33bc071b-ab55-4bea-a4b2-351c18e716e7","Type":"ContainerStarted","Data":"83b1f53bda021f768c3feb2c5d496d1ad3796ff8f5abfc0c7a3c99531b918582"} Sep 30 00:28:51 crc kubenswrapper[4809]: I0930 00:28:51.704417 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" path="/var/lib/kubelet/pods/25170433-57de-43a1-8459-8c2e31c42ffb/volumes" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.132322 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-f22f-account-create-ctshj" 
event={"ID":"54c601fe-b3c3-40ee-bf5d-6f15b4535a22","Type":"ContainerDied","Data":"e0786736951cc5acb09b4fe4ac29978cd1a903f6236bea5b56a9e17970e6d4c8"} Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.132700 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0786736951cc5acb09b4fe4ac29978cd1a903f6236bea5b56a9e17970e6d4c8" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.134295 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-491f-account-create-fwgpd" event={"ID":"70c3a309-0bd5-4672-bfeb-876af5a894ca","Type":"ContainerDied","Data":"edf84148e7f48a352af75acba6195308720d30750b105ef224cc386cd2427ac5"} Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.134330 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edf84148e7f48a352af75acba6195308720d30750b105ef224cc386cd2427ac5" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.136568 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c98f-account-create-b2zgz" event={"ID":"eb21eb26-8a42-4a7c-ace5-392c2284e0e5","Type":"ContainerDied","Data":"b3fea8b9cab70f837d2d2dbfb8067983f38cbe15918117aec06538a7c1838f70"} Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.136618 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3fea8b9cab70f837d2d2dbfb8067983f38cbe15918117aec06538a7c1838f70" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.139194 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"33bc071b-ab55-4bea-a4b2-351c18e716e7","Type":"ContainerStarted","Data":"7fb4b1689148fabd3b43b69ae99424ff3009da27c9e96c2e0dd37ed5cb97f2d0"} Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.209456 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.217550 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.230284 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.315673 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj2c4\" (UniqueName: \"kubernetes.io/projected/54c601fe-b3c3-40ee-bf5d-6f15b4535a22-kube-api-access-rj2c4\") pod \"54c601fe-b3c3-40ee-bf5d-6f15b4535a22\" (UID: \"54c601fe-b3c3-40ee-bf5d-6f15b4535a22\") " Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.315831 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w66nr\" (UniqueName: \"kubernetes.io/projected/70c3a309-0bd5-4672-bfeb-876af5a894ca-kube-api-access-w66nr\") pod \"70c3a309-0bd5-4672-bfeb-876af5a894ca\" (UID: \"70c3a309-0bd5-4672-bfeb-876af5a894ca\") " Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.316003 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fppg\" (UniqueName: \"kubernetes.io/projected/eb21eb26-8a42-4a7c-ace5-392c2284e0e5-kube-api-access-4fppg\") pod \"eb21eb26-8a42-4a7c-ace5-392c2284e0e5\" (UID: \"eb21eb26-8a42-4a7c-ace5-392c2284e0e5\") " Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.321373 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb21eb26-8a42-4a7c-ace5-392c2284e0e5-kube-api-access-4fppg" (OuterVolumeSpecName: "kube-api-access-4fppg") pod "eb21eb26-8a42-4a7c-ace5-392c2284e0e5" (UID: "eb21eb26-8a42-4a7c-ace5-392c2284e0e5"). InnerVolumeSpecName "kube-api-access-4fppg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.321968 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c601fe-b3c3-40ee-bf5d-6f15b4535a22-kube-api-access-rj2c4" (OuterVolumeSpecName: "kube-api-access-rj2c4") pod "54c601fe-b3c3-40ee-bf5d-6f15b4535a22" (UID: "54c601fe-b3c3-40ee-bf5d-6f15b4535a22"). InnerVolumeSpecName "kube-api-access-rj2c4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.322394 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70c3a309-0bd5-4672-bfeb-876af5a894ca-kube-api-access-w66nr" (OuterVolumeSpecName: "kube-api-access-w66nr") pod "70c3a309-0bd5-4672-bfeb-876af5a894ca" (UID: "70c3a309-0bd5-4672-bfeb-876af5a894ca"). InnerVolumeSpecName "kube-api-access-w66nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.417780 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj2c4\" (UniqueName: \"kubernetes.io/projected/54c601fe-b3c3-40ee-bf5d-6f15b4535a22-kube-api-access-rj2c4\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.418060 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w66nr\" (UniqueName: \"kubernetes.io/projected/70c3a309-0bd5-4672-bfeb-876af5a894ca-kube-api-access-w66nr\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:54 crc kubenswrapper[4809]: I0930 00:28:54.418075 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fppg\" (UniqueName: \"kubernetes.io/projected/eb21eb26-8a42-4a7c-ace5-392c2284e0e5-kube-api-access-4fppg\") on node \"crc\" DevicePath \"\"" Sep 30 00:28:55 crc kubenswrapper[4809]: I0930 00:28:55.154852 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c98f-account-create-b2zgz" Sep 30 00:28:55 crc kubenswrapper[4809]: I0930 00:28:55.154944 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-491f-account-create-fwgpd" Sep 30 00:28:55 crc kubenswrapper[4809]: I0930 00:28:55.154867 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-f22f-account-create-ctshj" Sep 30 00:28:57 crc kubenswrapper[4809]: I0930 00:28:57.186671 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"33bc071b-ab55-4bea-a4b2-351c18e716e7","Type":"ContainerStarted","Data":"18c193d8104392aa5707e5f2d07a3b621ba92f88ba9e6cd7bc50fe9454f2bb9d"} Sep 30 00:28:57 crc kubenswrapper[4809]: I0930 00:28:57.218202 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=19.218187287 podStartE2EDuration="19.218187287s" podCreationTimestamp="2025-09-30 00:28:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:28:57.216165232 +0000 UTC m=+1188.252414650" watchObservedRunningTime="2025-09-30 00:28:57.218187287 +0000 UTC m=+1188.254436695" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.295367 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-97da-account-create-qnblg"] Sep 30 00:28:58 crc kubenswrapper[4809]: E0930 00:28:58.298481 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="init" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298503 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="init" Sep 30 00:28:58 crc kubenswrapper[4809]: E0930 00:28:58.298532 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="dnsmasq-dns" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298541 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="dnsmasq-dns" Sep 30 00:28:58 crc kubenswrapper[4809]: E0930 00:28:58.298554 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c601fe-b3c3-40ee-bf5d-6f15b4535a22" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298560 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c601fe-b3c3-40ee-bf5d-6f15b4535a22" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: E0930 00:28:58.298581 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70c3a309-0bd5-4672-bfeb-876af5a894ca" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298587 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="70c3a309-0bd5-4672-bfeb-876af5a894ca" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: E0930 00:28:58.298599 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb21eb26-8a42-4a7c-ace5-392c2284e0e5" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298604 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb21eb26-8a42-4a7c-ace5-392c2284e0e5" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: 
I0930 00:28:58.298783 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="70c3a309-0bd5-4672-bfeb-876af5a894ca" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298795 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c601fe-b3c3-40ee-bf5d-6f15b4535a22" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298807 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="25170433-57de-43a1-8459-8c2e31c42ffb" containerName="dnsmasq-dns" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.298825 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb21eb26-8a42-4a7c-ace5-392c2284e0e5" containerName="mariadb-account-create" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.299487 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.304025 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.307463 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-97da-account-create-qnblg"] Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.393977 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn8sp\" (UniqueName: \"kubernetes.io/projected/038807b6-5f36-4116-a895-c64ad207d87d-kube-api-access-tn8sp\") pod \"neutron-97da-account-create-qnblg\" (UID: \"038807b6-5f36-4116-a895-c64ad207d87d\") " pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.495919 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn8sp\" (UniqueName: \"kubernetes.io/projected/038807b6-5f36-4116-a895-c64ad207d87d-kube-api-access-tn8sp\") pod \"neutron-97da-account-create-qnblg\" (UID: \"038807b6-5f36-4116-a895-c64ad207d87d\") " pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.519377 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn8sp\" (UniqueName: \"kubernetes.io/projected/038807b6-5f36-4116-a895-c64ad207d87d-kube-api-access-tn8sp\") pod \"neutron-97da-account-create-qnblg\" (UID: \"038807b6-5f36-4116-a895-c64ad207d87d\") " pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:28:58 crc kubenswrapper[4809]: I0930 00:28:58.625280 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:28:59 crc kubenswrapper[4809]: I0930 00:28:59.233862 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 30 00:29:04 crc kubenswrapper[4809]: E0930 00:29:04.015170 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Sep 30 00:29:04 crc kubenswrapper[4809]: E0930 00:29:04.015976 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-47xf5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-lnq6w_openstack(e3c04595-d3a8-4477-9daf-46b43a8750dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:29:04 crc kubenswrapper[4809]: E0930 00:29:04.017784 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-lnq6w" podUID="e3c04595-d3a8-4477-9daf-46b43a8750dd" Sep 30 00:29:04 crc kubenswrapper[4809]: E0930 00:29:04.248683 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-lnq6w" 
podUID="e3c04595-d3a8-4477-9daf-46b43a8750dd" Sep 30 00:29:04 crc kubenswrapper[4809]: I0930 00:29:04.513523 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-97da-account-create-qnblg"] Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.259414 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"1097b4e8-b93a-4350-b74c-acf37be3a84f","Type":"ContainerStarted","Data":"1d34205b33dbd022c8276ab14c2238065ebe82342cc3812d0abee5fb4f987479"} Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.265459 4809 generic.go:334] "Generic (PLEG): container finished" podID="038807b6-5f36-4116-a895-c64ad207d87d" containerID="864eef583f9e50939f1797ea0cdfa2310fe30f4fc2e6e3e476dca8519fdbf12f" exitCode=0 Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.265576 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-97da-account-create-qnblg" event={"ID":"038807b6-5f36-4116-a895-c64ad207d87d","Type":"ContainerDied","Data":"864eef583f9e50939f1797ea0cdfa2310fe30f4fc2e6e3e476dca8519fdbf12f"} Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.265601 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-97da-account-create-qnblg" event={"ID":"038807b6-5f36-4116-a895-c64ad207d87d","Type":"ContainerStarted","Data":"a4a635dcfc3d74542698a5056690eeb6ce6427296c168491d398688dd11ccd54"} Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.267659 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j2xxj" event={"ID":"e71457ea-c5a1-4a8b-8524-68181a838ffd","Type":"ContainerStarted","Data":"56c82cab30fc23b9821f8a17e4b7e7236f69ca46bd2e318ca796316377904af0"} Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.339322 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-0" podStartSLOduration=3.108092774 podStartE2EDuration="17.339296429s" podCreationTimestamp="2025-09-30 00:28:48 +0000 UTC" firstStartedPulling="2025-09-30 00:28:49.769037508 +0000 UTC m=+1180.805286916" lastFinishedPulling="2025-09-30 00:29:04.000241163 +0000 UTC m=+1195.036490571" observedRunningTime="2025-09-30 00:29:05.280169746 +0000 UTC m=+1196.316419174" watchObservedRunningTime="2025-09-30 00:29:05.339296429 +0000 UTC m=+1196.375545837" Sep 30 00:29:05 crc kubenswrapper[4809]: I0930 00:29:05.420818 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-j2xxj" podStartSLOduration=12.148001587 podStartE2EDuration="19.420793624s" podCreationTimestamp="2025-09-30 00:28:46 +0000 UTC" firstStartedPulling="2025-09-30 00:28:47.601300857 +0000 UTC m=+1178.637550265" lastFinishedPulling="2025-09-30 00:28:54.874092854 +0000 UTC m=+1185.910342302" observedRunningTime="2025-09-30 00:29:05.335732423 +0000 UTC m=+1196.371981831" watchObservedRunningTime="2025-09-30 00:29:05.420793624 +0000 UTC m=+1196.457043032" Sep 30 00:29:06 crc kubenswrapper[4809]: I0930 00:29:06.609852 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:29:06 crc kubenswrapper[4809]: I0930 00:29:06.674930 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn8sp\" (UniqueName: \"kubernetes.io/projected/038807b6-5f36-4116-a895-c64ad207d87d-kube-api-access-tn8sp\") pod \"038807b6-5f36-4116-a895-c64ad207d87d\" (UID: \"038807b6-5f36-4116-a895-c64ad207d87d\") " Sep 30 00:29:06 crc kubenswrapper[4809]: I0930 00:29:06.680978 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/038807b6-5f36-4116-a895-c64ad207d87d-kube-api-access-tn8sp" (OuterVolumeSpecName: "kube-api-access-tn8sp") pod "038807b6-5f36-4116-a895-c64ad207d87d" (UID: "038807b6-5f36-4116-a895-c64ad207d87d"). InnerVolumeSpecName "kube-api-access-tn8sp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:06 crc kubenswrapper[4809]: I0930 00:29:06.777590 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn8sp\" (UniqueName: \"kubernetes.io/projected/038807b6-5f36-4116-a895-c64ad207d87d-kube-api-access-tn8sp\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:07 crc kubenswrapper[4809]: I0930 00:29:07.287583 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-97da-account-create-qnblg" event={"ID":"038807b6-5f36-4116-a895-c64ad207d87d","Type":"ContainerDied","Data":"a4a635dcfc3d74542698a5056690eeb6ce6427296c168491d398688dd11ccd54"} Sep 30 00:29:07 crc kubenswrapper[4809]: I0930 00:29:07.287619 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4a635dcfc3d74542698a5056690eeb6ce6427296c168491d398688dd11ccd54" Sep 30 00:29:07 crc kubenswrapper[4809]: I0930 00:29:07.287634 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-97da-account-create-qnblg" Sep 30 00:29:09 crc kubenswrapper[4809]: I0930 00:29:09.233988 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 00:29:09 crc kubenswrapper[4809]: I0930 00:29:09.239579 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 00:29:09 crc kubenswrapper[4809]: I0930 00:29:09.316860 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 00:29:18 crc kubenswrapper[4809]: I0930 00:29:18.411770 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lnq6w" event={"ID":"e3c04595-d3a8-4477-9daf-46b43a8750dd","Type":"ContainerStarted","Data":"00e21decf5cad846ef0c19e1ed9c06bb4fe8db15585a3cb67a239628dd8195f6"} Sep 30 00:29:18 crc kubenswrapper[4809]: I0930 00:29:18.428710 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-lnq6w" podStartSLOduration=2.311964714 podStartE2EDuration="32.428691932s" podCreationTimestamp="2025-09-30 00:28:46 +0000 UTC" firstStartedPulling="2025-09-30 00:28:47.616290701 +0000 UTC m=+1178.652540109" lastFinishedPulling="2025-09-30 00:29:17.733017919 +0000 UTC m=+1208.769267327" observedRunningTime="2025-09-30 00:29:18.428518448 +0000 UTC m=+1209.464767876" watchObservedRunningTime="2025-09-30 00:29:18.428691932 +0000 UTC m=+1209.464941340" Sep 30 00:29:20 crc kubenswrapper[4809]: I0930 00:29:20.453668 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j2xxj" event={"ID":"e71457ea-c5a1-4a8b-8524-68181a838ffd","Type":"ContainerDied","Data":"56c82cab30fc23b9821f8a17e4b7e7236f69ca46bd2e318ca796316377904af0"} Sep 30 00:29:20 crc kubenswrapper[4809]: I0930 00:29:20.453630 4809 generic.go:334] "Generic (PLEG): container finished" podID="e71457ea-c5a1-4a8b-8524-68181a838ffd" containerID="56c82cab30fc23b9821f8a17e4b7e7236f69ca46bd2e318ca796316377904af0" exitCode=0 Sep 30 00:29:21 crc kubenswrapper[4809]: I0930 00:29:21.862774 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:29:21 crc kubenswrapper[4809]: I0930 00:29:21.987908 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-config-data\") pod \"e71457ea-c5a1-4a8b-8524-68181a838ffd\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " Sep 30 00:29:21 crc kubenswrapper[4809]: I0930 00:29:21.988063 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-combined-ca-bundle\") pod \"e71457ea-c5a1-4a8b-8524-68181a838ffd\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " Sep 30 00:29:21 crc kubenswrapper[4809]: I0930 00:29:21.988144 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpjfl\" (UniqueName: \"kubernetes.io/projected/e71457ea-c5a1-4a8b-8524-68181a838ffd-kube-api-access-fpjfl\") pod \"e71457ea-c5a1-4a8b-8524-68181a838ffd\" (UID: \"e71457ea-c5a1-4a8b-8524-68181a838ffd\") " Sep 30 00:29:21 crc kubenswrapper[4809]: I0930 00:29:21.993554 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e71457ea-c5a1-4a8b-8524-68181a838ffd-kube-api-access-fpjfl" (OuterVolumeSpecName: "kube-api-access-fpjfl") pod "e71457ea-c5a1-4a8b-8524-68181a838ffd" (UID: "e71457ea-c5a1-4a8b-8524-68181a838ffd"). InnerVolumeSpecName "kube-api-access-fpjfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.026794 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e71457ea-c5a1-4a8b-8524-68181a838ffd" (UID: "e71457ea-c5a1-4a8b-8524-68181a838ffd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.033981 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-config-data" (OuterVolumeSpecName: "config-data") pod "e71457ea-c5a1-4a8b-8524-68181a838ffd" (UID: "e71457ea-c5a1-4a8b-8524-68181a838ffd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.089926 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.089962 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71457ea-c5a1-4a8b-8524-68181a838ffd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.089976 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpjfl\" (UniqueName: \"kubernetes.io/projected/e71457ea-c5a1-4a8b-8524-68181a838ffd-kube-api-access-fpjfl\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.481023 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j2xxj" event={"ID":"e71457ea-c5a1-4a8b-8524-68181a838ffd","Type":"ContainerDied","Data":"ea13d64d565db160566e7f4f3ea92b015205fa2fbee0807e0d687630693d928a"} Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.481251 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea13d64d565db160566e7f4f3ea92b015205fa2fbee0807e0d687630693d928a" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.481194 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-j2xxj" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.712704 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-jlkfr"] Sep 30 00:29:22 crc kubenswrapper[4809]: E0930 00:29:22.713167 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e71457ea-c5a1-4a8b-8524-68181a838ffd" containerName="keystone-db-sync" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.713188 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e71457ea-c5a1-4a8b-8524-68181a838ffd" containerName="keystone-db-sync" Sep 30 00:29:22 crc kubenswrapper[4809]: E0930 00:29:22.713208 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="038807b6-5f36-4116-a895-c64ad207d87d" containerName="mariadb-account-create" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.713218 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="038807b6-5f36-4116-a895-c64ad207d87d" containerName="mariadb-account-create" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.713431 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e71457ea-c5a1-4a8b-8524-68181a838ffd" containerName="keystone-db-sync" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.713467 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="038807b6-5f36-4116-a895-c64ad207d87d" containerName="mariadb-account-create" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.714251 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.718249 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.718540 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.718793 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8vz6r" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.718933 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.748179 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-jlkfr"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.764807 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-p8h2h"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.766883 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.802232 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-config-data\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.802396 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrhwx\" (UniqueName: \"kubernetes.io/projected/5662ece6-c739-474c-bb4e-4ec3a1e00cff-kube-api-access-wrhwx\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.802443 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-fernet-keys\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.802467 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-combined-ca-bundle\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.802544 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-scripts\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.802574 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-credential-keys\") pod \"keystone-bootstrap-jlkfr\" (UID: 
\"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.818283 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-qttpq"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.819857 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.824923 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-j9vcq" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.825141 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.833611 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-p8h2h"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.843535 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-qttpq"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904058 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzxz6\" (UniqueName: \"kubernetes.io/projected/23ff9291-ad89-46a6-9d4a-adf9e545adb2-kube-api-access-zzxz6\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904112 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-credential-keys\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904154 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904181 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-svc\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904216 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-config-data\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904232 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-config\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904261 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-config-data\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904279 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-combined-ca-bundle\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904309 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904326 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904361 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrhwx\" (UniqueName: \"kubernetes.io/projected/5662ece6-c739-474c-bb4e-4ec3a1e00cff-kube-api-access-wrhwx\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904386 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-226lr\" (UniqueName: \"kubernetes.io/projected/17c2589f-a9fa-4c83-8622-85db46303f8d-kube-api-access-226lr\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904410 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-fernet-keys\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904432 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-combined-ca-bundle\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.904460 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-scripts\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.916775 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-scripts\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.920442 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-combined-ca-bundle\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.921385 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-config-data\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.921893 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-fernet-keys\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.927134 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-credential-keys\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.931215 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrhwx\" (UniqueName: \"kubernetes.io/projected/5662ece6-c739-474c-bb4e-4ec3a1e00cff-kube-api-access-wrhwx\") pod \"keystone-bootstrap-jlkfr\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.957397 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-qwzxp"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.958889 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.963622 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.963947 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-p2vzz" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.965474 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-j7xdb"] Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.967040 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.967706 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.970074 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.970236 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2dtft" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.970304 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 00:29:22 crc kubenswrapper[4809]: I0930 00:29:22.990760 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qwzxp"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.006295 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-j7xdb"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007549 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007598 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-svc\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007665 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-config\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007702 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-config-data\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007730 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-combined-ca-bundle\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007763 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007785 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007843 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-226lr\" (UniqueName: \"kubernetes.io/projected/17c2589f-a9fa-4c83-8622-85db46303f8d-kube-api-access-226lr\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.007905 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzxz6\" (UniqueName: \"kubernetes.io/projected/23ff9291-ad89-46a6-9d4a-adf9e545adb2-kube-api-access-zzxz6\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.009361 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.009899 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-svc\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.010444 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.010974 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.012072 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-config\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.012911 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-combined-ca-bundle\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.025577 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-config-data\") pod \"heat-db-sync-qttpq\" (UID: 
\"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.035611 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.039936 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzxz6\" (UniqueName: \"kubernetes.io/projected/23ff9291-ad89-46a6-9d4a-adf9e545adb2-kube-api-access-zzxz6\") pod \"heat-db-sync-qttpq\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.056808 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-226lr\" (UniqueName: \"kubernetes.io/projected/17c2589f-a9fa-4c83-8622-85db46303f8d-kube-api-access-226lr\") pod \"dnsmasq-dns-55fff446b9-p8h2h\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.089464 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jkrgs"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.101283 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.116021 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.116550 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-l58q9" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.116600 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118044 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-config\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118083 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-scripts\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118128 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw25b\" (UniqueName: \"kubernetes.io/projected/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-kube-api-access-sw25b\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118152 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-db-sync-config-data\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118186 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-etc-machine-id\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118272 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-combined-ca-bundle\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118291 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w4vm\" (UniqueName: \"kubernetes.io/projected/a0424380-2493-4499-8314-274a6af06aa4-kube-api-access-9w4vm\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118322 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-combined-ca-bundle\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118347 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-config-data\") pod \"cinder-db-sync-qwzxp\" (UID: 
\"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.118530 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jkrgs"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.147212 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qttpq" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.186049 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-p8h2h"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.199986 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-q8nkh"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.201530 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.206687 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.207119 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fnr26" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.211135 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224382 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-etc-machine-id\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224521 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-combined-ca-bundle\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224606 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-combined-ca-bundle\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224614 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-etc-machine-id\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224635 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-db-sync-config-data\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224683 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w4vm\" (UniqueName: 
\"kubernetes.io/projected/a0424380-2493-4499-8314-274a6af06aa4-kube-api-access-9w4vm\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224765 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-combined-ca-bundle\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224827 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-config-data\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224935 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-config\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.224962 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-scripts\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.225075 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw25b\" (UniqueName: \"kubernetes.io/projected/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-kube-api-access-sw25b\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.225121 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m75gn\" (UniqueName: \"kubernetes.io/projected/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-kube-api-access-m75gn\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.225153 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-db-sync-config-data\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.230911 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-config-data\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.231022 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-q8nkh"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.233904 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-combined-ca-bundle\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.236163 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-scripts\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.236686 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-combined-ca-bundle\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.236802 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-config\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.246728 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-t9mpd"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.248827 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.252963 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw25b\" (UniqueName: \"kubernetes.io/projected/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-kube-api-access-sw25b\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.253399 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w4vm\" (UniqueName: \"kubernetes.io/projected/a0424380-2493-4499-8314-274a6af06aa4-kube-api-access-9w4vm\") pod \"neutron-db-sync-j7xdb\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.254429 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-db-sync-config-data\") pod \"cinder-db-sync-qwzxp\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.263719 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-t9mpd"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327102 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-config-data\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327162 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m75gn\" (UniqueName: 
\"kubernetes.io/projected/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-kube-api-access-m75gn\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327205 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-combined-ca-bundle\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327237 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-combined-ca-bundle\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327280 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-db-sync-config-data\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327308 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-logs\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327327 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk4fx\" (UniqueName: \"kubernetes.io/projected/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-kube-api-access-pk4fx\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.327348 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-scripts\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.335742 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-combined-ca-bundle\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.337697 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-db-sync-config-data\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.347181 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m75gn\" (UniqueName: 
\"kubernetes.io/projected/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-kube-api-access-m75gn\") pod \"barbican-db-sync-jkrgs\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.449629 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450243 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450351 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450388 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-config-data\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450422 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450449 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txt7k\" (UniqueName: \"kubernetes.io/projected/c63835cb-f0af-438b-8213-da9cfb939cf9-kube-api-access-txt7k\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450484 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-combined-ca-bundle\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450517 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450581 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-logs\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 
00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450610 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk4fx\" (UniqueName: \"kubernetes.io/projected/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-kube-api-access-pk4fx\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450659 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-scripts\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.450710 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-config\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.451765 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-logs\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.457510 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-config-data\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.458199 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-combined-ca-bundle\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.464075 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.472849 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-scripts\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.477480 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk4fx\" (UniqueName: \"kubernetes.io/projected/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-kube-api-access-pk4fx\") pod \"placement-db-sync-q8nkh\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.489213 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.501005 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.504149 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.523447 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.523898 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.524128 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.558952 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.560707 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.560797 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-config\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.560842 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.560868 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.560896 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.560917 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txt7k\" (UniqueName: \"kubernetes.io/projected/c63835cb-f0af-438b-8213-da9cfb939cf9-kube-api-access-txt7k\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.561901 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.563509 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.572310 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.578547 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-config\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.578613 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.594275 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txt7k\" (UniqueName: \"kubernetes.io/projected/c63835cb-f0af-438b-8213-da9cfb939cf9-kube-api-access-txt7k\") pod \"dnsmasq-dns-76fcf4b695-t9mpd\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.672796 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.672899 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.672949 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqjgv\" (UniqueName: \"kubernetes.io/projected/3efcbfdc-c939-452c-96e5-244bc7b7bff3-kube-api-access-bqjgv\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.672967 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-scripts\") pod \"ceilometer-0\" (UID: 
\"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.673006 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-log-httpd\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.673026 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-run-httpd\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.673060 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-config-data\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.754547 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-jlkfr"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.775386 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.775876 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqjgv\" (UniqueName: \"kubernetes.io/projected/3efcbfdc-c939-452c-96e5-244bc7b7bff3-kube-api-access-bqjgv\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.776028 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-scripts\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.776146 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-log-httpd\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.776229 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-run-httpd\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.776336 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-config-data\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.776434 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.776782 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-log-httpd\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.778330 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-run-httpd\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.780223 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-p8h2h"] Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.780692 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-config-data\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.781086 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-scripts\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.787118 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.787634 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.802031 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqjgv\" (UniqueName: \"kubernetes.io/projected/3efcbfdc-c939-452c-96e5-244bc7b7bff3-kube-api-access-bqjgv\") pod \"ceilometer-0\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " pod="openstack/ceilometer-0" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.871107 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:23 crc kubenswrapper[4809]: I0930 00:29:23.901792 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.040457 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-qttpq"] Sep 30 00:29:24 crc kubenswrapper[4809]: W0930 00:29:24.056604 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23ff9291_ad89_46a6_9d4a_adf9e545adb2.slice/crio-188956dc3f1d0cfa16a552ee6f0c83e959e66752799a9877fe6a3d166dd17672 WatchSource:0}: Error finding container 188956dc3f1d0cfa16a552ee6f0c83e959e66752799a9877fe6a3d166dd17672: Status 404 returned error can't find the container with id 188956dc3f1d0cfa16a552ee6f0c83e959e66752799a9877fe6a3d166dd17672 Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.066094 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.522176 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jlkfr" event={"ID":"5662ece6-c739-474c-bb4e-4ec3a1e00cff","Type":"ContainerStarted","Data":"bb10db75c5472efda1299a017e52c1ba85a1abafb9f99878972a3dfe411a0f8d"} Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.527787 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jlkfr" event={"ID":"5662ece6-c739-474c-bb4e-4ec3a1e00cff","Type":"ContainerStarted","Data":"9ce5b04f534e3a3b7dd7bb76f6cf04380c8c4c68df295c8b477f11587b3407dd"} Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.527812 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" event={"ID":"17c2589f-a9fa-4c83-8622-85db46303f8d","Type":"ContainerDied","Data":"ce407c34ee376ba31ad90c07ecd06a9a1cabb8a4d1395b7eb1751644b876d822"} Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.525279 4809 generic.go:334] "Generic (PLEG): container finished" podID="17c2589f-a9fa-4c83-8622-85db46303f8d" containerID="ce407c34ee376ba31ad90c07ecd06a9a1cabb8a4d1395b7eb1751644b876d822" exitCode=0 Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.528551 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" event={"ID":"17c2589f-a9fa-4c83-8622-85db46303f8d","Type":"ContainerStarted","Data":"99d65317ee9d4699cd656ae80029f75778da50ad29e258ef924d35e3f2a7153c"} Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.533304 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qttpq" event={"ID":"23ff9291-ad89-46a6-9d4a-adf9e545adb2","Type":"ContainerStarted","Data":"188956dc3f1d0cfa16a552ee6f0c83e959e66752799a9877fe6a3d166dd17672"} Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.548629 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-jlkfr" podStartSLOduration=2.548614619 podStartE2EDuration="2.548614619s" podCreationTimestamp="2025-09-30 00:29:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:24.544105748 +0000 UTC m=+1215.580355156" watchObservedRunningTime="2025-09-30 00:29:24.548614619 +0000 UTC m=+1215.584864017" Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.598480 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-j7xdb"] Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.620431 4809 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/placement-db-sync-q8nkh"] Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.730349 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-t9mpd"] Sep 30 00:29:24 crc kubenswrapper[4809]: W0930 00:29:24.758763 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3efcbfdc_c939_452c_96e5_244bc7b7bff3.slice/crio-9840dec362e5197a35ce62ed481e7519936bbb9f14f8934b7b467335949b028a WatchSource:0}: Error finding container 9840dec362e5197a35ce62ed481e7519936bbb9f14f8934b7b467335949b028a: Status 404 returned error can't find the container with id 9840dec362e5197a35ce62ed481e7519936bbb9f14f8934b7b467335949b028a Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.768025 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jkrgs"] Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.809237 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qwzxp"] Sep 30 00:29:24 crc kubenswrapper[4809]: I0930 00:29:24.820750 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.085524 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.123317 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-226lr\" (UniqueName: \"kubernetes.io/projected/17c2589f-a9fa-4c83-8622-85db46303f8d-kube-api-access-226lr\") pod \"17c2589f-a9fa-4c83-8622-85db46303f8d\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.123700 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-config\") pod \"17c2589f-a9fa-4c83-8622-85db46303f8d\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.123783 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-sb\") pod \"17c2589f-a9fa-4c83-8622-85db46303f8d\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.123859 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-nb\") pod \"17c2589f-a9fa-4c83-8622-85db46303f8d\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.123919 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-svc\") pod \"17c2589f-a9fa-4c83-8622-85db46303f8d\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.123937 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-swift-storage-0\") pod \"17c2589f-a9fa-4c83-8622-85db46303f8d\" (UID: \"17c2589f-a9fa-4c83-8622-85db46303f8d\") " Sep 30 00:29:25 crc 
kubenswrapper[4809]: I0930 00:29:25.214820 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17c2589f-a9fa-4c83-8622-85db46303f8d-kube-api-access-226lr" (OuterVolumeSpecName: "kube-api-access-226lr") pod "17c2589f-a9fa-4c83-8622-85db46303f8d" (UID: "17c2589f-a9fa-4c83-8622-85db46303f8d"). InnerVolumeSpecName "kube-api-access-226lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.215550 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "17c2589f-a9fa-4c83-8622-85db46303f8d" (UID: "17c2589f-a9fa-4c83-8622-85db46303f8d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.223040 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-config" (OuterVolumeSpecName: "config") pod "17c2589f-a9fa-4c83-8622-85db46303f8d" (UID: "17c2589f-a9fa-4c83-8622-85db46303f8d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.229181 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.229214 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-226lr\" (UniqueName: \"kubernetes.io/projected/17c2589f-a9fa-4c83-8622-85db46303f8d-kube-api-access-226lr\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.229227 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.329840 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "17c2589f-a9fa-4c83-8622-85db46303f8d" (UID: "17c2589f-a9fa-4c83-8622-85db46303f8d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.330270 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "17c2589f-a9fa-4c83-8622-85db46303f8d" (UID: "17c2589f-a9fa-4c83-8622-85db46303f8d"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.338842 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.338882 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.345347 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "17c2589f-a9fa-4c83-8622-85db46303f8d" (UID: "17c2589f-a9fa-4c83-8622-85db46303f8d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.441458 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/17c2589f-a9fa-4c83-8622-85db46303f8d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.548814 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q8nkh" event={"ID":"2e2cc1eb-6303-4607-a467-d21cc3c5d25d","Type":"ContainerStarted","Data":"c375dead6128511628d167eb7980be80bfbd62510271b4bf9fcc4fbe9335f600"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.560359 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j7xdb" event={"ID":"a0424380-2493-4499-8314-274a6af06aa4","Type":"ContainerStarted","Data":"8d419f218018f9f557437322cf15156ebe4c2a4248368f6646bd9fb37412346f"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.560435 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j7xdb" event={"ID":"a0424380-2493-4499-8314-274a6af06aa4","Type":"ContainerStarted","Data":"df6f59cb9585b44d181ab79815dbc4f0f64515c6f2f3180335db2a1541ba5c8d"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.567633 4809 generic.go:334] "Generic (PLEG): container finished" podID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerID="c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf" exitCode=0 Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.567738 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" event={"ID":"c63835cb-f0af-438b-8213-da9cfb939cf9","Type":"ContainerDied","Data":"c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.567813 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" event={"ID":"c63835cb-f0af-438b-8213-da9cfb939cf9","Type":"ContainerStarted","Data":"27d21cff35877d0eadcb2fc9d1b0e6e655ea67ae438f782cda26be4dadcb4354"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.569444 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qwzxp" event={"ID":"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5","Type":"ContainerStarted","Data":"75b319ba399b2ab4ccbce39163f239d221e7474d21ec29cc7e2a12223d2ca1c2"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.580796 4809 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/neutron-db-sync-j7xdb" podStartSLOduration=3.580774087 podStartE2EDuration="3.580774087s" podCreationTimestamp="2025-09-30 00:29:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:25.576901693 +0000 UTC m=+1216.613151101" watchObservedRunningTime="2025-09-30 00:29:25.580774087 +0000 UTC m=+1216.617023505" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.581956 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jkrgs" event={"ID":"a1ccba93-c72a-4aea-b972-c54bbe6bfc99","Type":"ContainerStarted","Data":"bf870fddcc14a3252216193326aa5bab4a44725734b7f853ebc61c342ab9c027"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.589953 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" event={"ID":"17c2589f-a9fa-4c83-8622-85db46303f8d","Type":"ContainerDied","Data":"99d65317ee9d4699cd656ae80029f75778da50ad29e258ef924d35e3f2a7153c"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.590006 4809 scope.go:117] "RemoveContainer" containerID="ce407c34ee376ba31ad90c07ecd06a9a1cabb8a4d1395b7eb1751644b876d822" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.590197 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-p8h2h" Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.603536 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerStarted","Data":"9840dec362e5197a35ce62ed481e7519936bbb9f14f8934b7b467335949b028a"} Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.686185 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-p8h2h"] Sep 30 00:29:25 crc kubenswrapper[4809]: I0930 00:29:25.714673 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-p8h2h"] Sep 30 00:29:26 crc kubenswrapper[4809]: I0930 00:29:26.060602 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:29:26 crc kubenswrapper[4809]: I0930 00:29:26.634103 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" event={"ID":"c63835cb-f0af-438b-8213-da9cfb939cf9","Type":"ContainerStarted","Data":"bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7"} Sep 30 00:29:26 crc kubenswrapper[4809]: I0930 00:29:26.634472 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:26 crc kubenswrapper[4809]: I0930 00:29:26.656926 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" podStartSLOduration=3.65690992 podStartE2EDuration="3.65690992s" podCreationTimestamp="2025-09-30 00:29:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:26.653484788 +0000 UTC m=+1217.689734236" watchObservedRunningTime="2025-09-30 00:29:26.65690992 +0000 UTC m=+1217.693159318" Sep 30 00:29:27 crc kubenswrapper[4809]: I0930 00:29:27.652672 4809 generic.go:334] "Generic (PLEG): container finished" podID="e3c04595-d3a8-4477-9daf-46b43a8750dd" containerID="00e21decf5cad846ef0c19e1ed9c06bb4fe8db15585a3cb67a239628dd8195f6" exitCode=0 Sep 30 00:29:27 crc 
kubenswrapper[4809]: I0930 00:29:27.652730 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lnq6w" event={"ID":"e3c04595-d3a8-4477-9daf-46b43a8750dd","Type":"ContainerDied","Data":"00e21decf5cad846ef0c19e1ed9c06bb4fe8db15585a3cb67a239628dd8195f6"} Sep 30 00:29:27 crc kubenswrapper[4809]: I0930 00:29:27.709691 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17c2589f-a9fa-4c83-8622-85db46303f8d" path="/var/lib/kubelet/pods/17c2589f-a9fa-4c83-8622-85db46303f8d/volumes" Sep 30 00:29:28 crc kubenswrapper[4809]: I0930 00:29:28.664431 4809 generic.go:334] "Generic (PLEG): container finished" podID="5662ece6-c739-474c-bb4e-4ec3a1e00cff" containerID="bb10db75c5472efda1299a017e52c1ba85a1abafb9f99878972a3dfe411a0f8d" exitCode=0 Sep 30 00:29:28 crc kubenswrapper[4809]: I0930 00:29:28.664495 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jlkfr" event={"ID":"5662ece6-c739-474c-bb4e-4ec3a1e00cff","Type":"ContainerDied","Data":"bb10db75c5472efda1299a017e52c1ba85a1abafb9f99878972a3dfe411a0f8d"} Sep 30 00:29:33 crc kubenswrapper[4809]: I0930 00:29:33.874813 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:29:33 crc kubenswrapper[4809]: I0930 00:29:33.983969 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-d9jks"] Sep 30 00:29:33 crc kubenswrapper[4809]: I0930 00:29:33.984602 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" containerID="cri-o://c0107e37ee0d442f160e3fd69f2a7f09be50715d85729d6b4303f2185a6fe709" gracePeriod=10 Sep 30 00:29:33 crc kubenswrapper[4809]: I0930 00:29:33.987888 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.019311 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lnq6w" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.077651 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrhwx\" (UniqueName: \"kubernetes.io/projected/5662ece6-c739-474c-bb4e-4ec3a1e00cff-kube-api-access-wrhwx\") pod \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.099989 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5662ece6-c739-474c-bb4e-4ec3a1e00cff-kube-api-access-wrhwx" (OuterVolumeSpecName: "kube-api-access-wrhwx") pod "5662ece6-c739-474c-bb4e-4ec3a1e00cff" (UID: "5662ece6-c739-474c-bb4e-4ec3a1e00cff"). InnerVolumeSpecName "kube-api-access-wrhwx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.188820 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-config-data\") pod \"e3c04595-d3a8-4477-9daf-46b43a8750dd\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.188902 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-credential-keys\") pod \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.188938 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47xf5\" (UniqueName: \"kubernetes.io/projected/e3c04595-d3a8-4477-9daf-46b43a8750dd-kube-api-access-47xf5\") pod \"e3c04595-d3a8-4477-9daf-46b43a8750dd\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.188984 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-db-sync-config-data\") pod \"e3c04595-d3a8-4477-9daf-46b43a8750dd\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.189083 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-config-data\") pod \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.189159 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-combined-ca-bundle\") pod \"e3c04595-d3a8-4477-9daf-46b43a8750dd\" (UID: \"e3c04595-d3a8-4477-9daf-46b43a8750dd\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.189231 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-scripts\") pod \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.189363 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-fernet-keys\") pod \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.189391 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-combined-ca-bundle\") pod \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\" (UID: \"5662ece6-c739-474c-bb4e-4ec3a1e00cff\") " Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.190265 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrhwx\" (UniqueName: \"kubernetes.io/projected/5662ece6-c739-474c-bb4e-4ec3a1e00cff-kube-api-access-wrhwx\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: 
I0930 00:29:34.214674 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3c04595-d3a8-4477-9daf-46b43a8750dd-kube-api-access-47xf5" (OuterVolumeSpecName: "kube-api-access-47xf5") pod "e3c04595-d3a8-4477-9daf-46b43a8750dd" (UID: "e3c04595-d3a8-4477-9daf-46b43a8750dd"). InnerVolumeSpecName "kube-api-access-47xf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.215439 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5662ece6-c739-474c-bb4e-4ec3a1e00cff" (UID: "5662ece6-c739-474c-bb4e-4ec3a1e00cff"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.215878 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5662ece6-c739-474c-bb4e-4ec3a1e00cff" (UID: "5662ece6-c739-474c-bb4e-4ec3a1e00cff"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.219903 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e3c04595-d3a8-4477-9daf-46b43a8750dd" (UID: "e3c04595-d3a8-4477-9daf-46b43a8750dd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.225146 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-scripts" (OuterVolumeSpecName: "scripts") pod "5662ece6-c739-474c-bb4e-4ec3a1e00cff" (UID: "5662ece6-c739-474c-bb4e-4ec3a1e00cff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.253478 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-config-data" (OuterVolumeSpecName: "config-data") pod "5662ece6-c739-474c-bb4e-4ec3a1e00cff" (UID: "5662ece6-c739-474c-bb4e-4ec3a1e00cff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.253602 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5662ece6-c739-474c-bb4e-4ec3a1e00cff" (UID: "5662ece6-c739-474c-bb4e-4ec3a1e00cff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.273008 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3c04595-d3a8-4477-9daf-46b43a8750dd" (UID: "e3c04595-d3a8-4477-9daf-46b43a8750dd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293174 4809 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293272 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293335 4809 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293392 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47xf5\" (UniqueName: \"kubernetes.io/projected/e3c04595-d3a8-4477-9daf-46b43a8750dd-kube-api-access-47xf5\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293449 4809 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293507 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293564 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.293634 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5662ece6-c739-474c-bb4e-4ec3a1e00cff-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.301454 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-config-data" (OuterVolumeSpecName: "config-data") pod "e3c04595-d3a8-4477-9daf-46b43a8750dd" (UID: "e3c04595-d3a8-4477-9daf-46b43a8750dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.395119 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3c04595-d3a8-4477-9daf-46b43a8750dd-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.743504 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerID="c0107e37ee0d442f160e3fd69f2a7f09be50715d85729d6b4303f2185a6fe709" exitCode=0 Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.743595 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" event={"ID":"ef1a2ce9-936b-4adf-b07a-5790b5915b3d","Type":"ContainerDied","Data":"c0107e37ee0d442f160e3fd69f2a7f09be50715d85729d6b4303f2185a6fe709"} Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.745608 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lnq6w" event={"ID":"e3c04595-d3a8-4477-9daf-46b43a8750dd","Type":"ContainerDied","Data":"84f6a3e93ce5a9853700b32df5dba96f16a2ab9a37917e887c738523daa19d79"} Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.745666 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lnq6w" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.745669 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84f6a3e93ce5a9853700b32df5dba96f16a2ab9a37917e887c738523daa19d79" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.748339 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jlkfr" event={"ID":"5662ece6-c739-474c-bb4e-4ec3a1e00cff","Type":"ContainerDied","Data":"9ce5b04f534e3a3b7dd7bb76f6cf04380c8c4c68df295c8b477f11587b3407dd"} Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.748381 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ce5b04f534e3a3b7dd7bb76f6cf04380c8c4c68df295c8b477f11587b3407dd" Sep 30 00:29:34 crc kubenswrapper[4809]: I0930 00:29:34.748428 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-jlkfr" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.180100 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-jlkfr"] Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.190572 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-jlkfr"] Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.312746 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-v6m7r"] Sep 30 00:29:35 crc kubenswrapper[4809]: E0930 00:29:35.313180 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17c2589f-a9fa-4c83-8622-85db46303f8d" containerName="init" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.313195 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="17c2589f-a9fa-4c83-8622-85db46303f8d" containerName="init" Sep 30 00:29:35 crc kubenswrapper[4809]: E0930 00:29:35.313219 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c04595-d3a8-4477-9daf-46b43a8750dd" containerName="glance-db-sync" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.313227 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c04595-d3a8-4477-9daf-46b43a8750dd" containerName="glance-db-sync" Sep 30 00:29:35 crc kubenswrapper[4809]: E0930 00:29:35.313242 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5662ece6-c739-474c-bb4e-4ec3a1e00cff" containerName="keystone-bootstrap" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.313248 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5662ece6-c739-474c-bb4e-4ec3a1e00cff" containerName="keystone-bootstrap" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.313430 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="17c2589f-a9fa-4c83-8622-85db46303f8d" containerName="init" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.313444 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5662ece6-c739-474c-bb4e-4ec3a1e00cff" containerName="keystone-bootstrap" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.313464 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3c04595-d3a8-4477-9daf-46b43a8750dd" containerName="glance-db-sync" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.314157 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.318998 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.319032 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8vz6r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.319131 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.319227 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.337616 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v6m7r"] Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.417417 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-config-data\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.417488 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-scripts\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.417538 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-combined-ca-bundle\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.417589 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-fernet-keys\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.417741 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-credential-keys\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.417807 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5cvl\" (UniqueName: \"kubernetes.io/projected/956e79d1-b7da-4b76-8ba6-21dd3838aff1-kube-api-access-d5cvl\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.432712 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-hk2cl"] Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.434917 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.455191 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-hk2cl"] Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519819 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-config-data\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519863 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-scripts\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519894 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gstvt\" (UniqueName: \"kubernetes.io/projected/736768f4-e809-4c81-879f-f624fcf5c479-kube-api-access-gstvt\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519913 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-config\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519939 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-combined-ca-bundle\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519957 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.519974 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.520005 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-fernet-keys\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.520053 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-credential-keys\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.520101 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5cvl\" (UniqueName: \"kubernetes.io/projected/956e79d1-b7da-4b76-8ba6-21dd3838aff1-kube-api-access-d5cvl\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.520132 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.520147 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.526930 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-scripts\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.527305 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-config-data\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.527552 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-combined-ca-bundle\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.529018 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-credential-keys\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.541626 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5cvl\" (UniqueName: \"kubernetes.io/projected/956e79d1-b7da-4b76-8ba6-21dd3838aff1-kube-api-access-d5cvl\") pod \"keystone-bootstrap-v6m7r\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.551273 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-fernet-keys\") pod \"keystone-bootstrap-v6m7r\" (UID: 
\"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.621504 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.621572 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.621662 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gstvt\" (UniqueName: \"kubernetes.io/projected/736768f4-e809-4c81-879f-f624fcf5c479-kube-api-access-gstvt\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.621686 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-config\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.621720 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.621744 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.622486 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.622860 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.623083 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 
00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.623548 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-config\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.623480 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.640259 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.645360 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gstvt\" (UniqueName: \"kubernetes.io/projected/736768f4-e809-4c81-879f-f624fcf5c479-kube-api-access-gstvt\") pod \"dnsmasq-dns-8b5c85b87-hk2cl\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.704210 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5662ece6-c739-474c-bb4e-4ec3a1e00cff" path="/var/lib/kubelet/pods/5662ece6-c739-474c-bb4e-4ec3a1e00cff/volumes" Sep 30 00:29:35 crc kubenswrapper[4809]: I0930 00:29:35.758408 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:38 crc kubenswrapper[4809]: I0930 00:29:38.686154 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.150:5353: connect: connection refused" Sep 30 00:29:42 crc kubenswrapper[4809]: I0930 00:29:42.827121 4809 generic.go:334] "Generic (PLEG): container finished" podID="a0424380-2493-4499-8314-274a6af06aa4" containerID="8d419f218018f9f557437322cf15156ebe4c2a4248368f6646bd9fb37412346f" exitCode=0 Sep 30 00:29:42 crc kubenswrapper[4809]: I0930 00:29:42.827343 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j7xdb" event={"ID":"a0424380-2493-4499-8314-274a6af06aa4","Type":"ContainerDied","Data":"8d419f218018f9f557437322cf15156ebe4c2a4248368f6646bd9fb37412346f"} Sep 30 00:29:43 crc kubenswrapper[4809]: I0930 00:29:43.685577 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.150:5353: connect: connection refused" Sep 30 00:29:48 crc kubenswrapper[4809]: I0930 00:29:48.685409 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.150:5353: connect: connection refused" Sep 30 00:29:48 crc kubenswrapper[4809]: I0930 00:29:48.686125 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.458801 4809 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.603941 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-config\") pod \"a0424380-2493-4499-8314-274a6af06aa4\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.604071 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w4vm\" (UniqueName: \"kubernetes.io/projected/a0424380-2493-4499-8314-274a6af06aa4-kube-api-access-9w4vm\") pod \"a0424380-2493-4499-8314-274a6af06aa4\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.604118 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-combined-ca-bundle\") pod \"a0424380-2493-4499-8314-274a6af06aa4\" (UID: \"a0424380-2493-4499-8314-274a6af06aa4\") " Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.610041 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0424380-2493-4499-8314-274a6af06aa4-kube-api-access-9w4vm" (OuterVolumeSpecName: "kube-api-access-9w4vm") pod "a0424380-2493-4499-8314-274a6af06aa4" (UID: "a0424380-2493-4499-8314-274a6af06aa4"). InnerVolumeSpecName "kube-api-access-9w4vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.635042 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-config" (OuterVolumeSpecName: "config") pod "a0424380-2493-4499-8314-274a6af06aa4" (UID: "a0424380-2493-4499-8314-274a6af06aa4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.640023 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0424380-2493-4499-8314-274a6af06aa4" (UID: "a0424380-2493-4499-8314-274a6af06aa4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.706152 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.706188 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w4vm\" (UniqueName: \"kubernetes.io/projected/a0424380-2493-4499-8314-274a6af06aa4-kube-api-access-9w4vm\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.706210 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0424380-2493-4499-8314-274a6af06aa4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.908066 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-j7xdb" event={"ID":"a0424380-2493-4499-8314-274a6af06aa4","Type":"ContainerDied","Data":"df6f59cb9585b44d181ab79815dbc4f0f64515c6f2f3180335db2a1541ba5c8d"} Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.908627 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df6f59cb9585b44d181ab79815dbc4f0f64515c6f2f3180335db2a1541ba5c8d" Sep 30 00:29:49 crc kubenswrapper[4809]: I0930 00:29:49.908124 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-j7xdb" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.625520 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-hk2cl"] Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.659022 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-bdvnx"] Sep 30 00:29:50 crc kubenswrapper[4809]: E0930 00:29:50.659563 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0424380-2493-4499-8314-274a6af06aa4" containerName="neutron-db-sync" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.659579 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0424380-2493-4499-8314-274a6af06aa4" containerName="neutron-db-sync" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.660988 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0424380-2493-4499-8314-274a6af06aa4" containerName="neutron-db-sync" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.662180 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.691770 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-bdvnx"] Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.723700 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5c58bdbf84-bjfsw"] Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.725441 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.731229 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.731377 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zqtf\" (UniqueName: \"kubernetes.io/projected/ab502782-7769-460f-8a2f-86d4886ec40a-kube-api-access-5zqtf\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.731410 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-config\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.731434 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.731449 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.731472 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.732034 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c58bdbf84-bjfsw"] Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.742951 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2dtft" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.743659 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.745508 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.745634 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 30 00:29:50 crc kubenswrapper[4809]: E0930 00:29:50.749013 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Sep 30 00:29:50 crc kubenswrapper[4809]: E0930 00:29:50.749185 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m75gn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-jkrgs_openstack(a1ccba93-c72a-4aea-b972-c54bbe6bfc99): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:29:50 crc kubenswrapper[4809]: E0930 00:29:50.753761 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-jkrgs" podUID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833320 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-httpd-config\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833611 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zqtf\" (UniqueName: \"kubernetes.io/projected/ab502782-7769-460f-8a2f-86d4886ec40a-kube-api-access-5zqtf\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833636 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-combined-ca-bundle\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " 
pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833672 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-config\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833696 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833713 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833737 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-ovndb-tls-certs\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833752 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.833994 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.834075 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9qjm\" (UniqueName: \"kubernetes.io/projected/54445277-2cb9-4dad-b1a4-0b0569ef0088-kube-api-access-d9qjm\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.834331 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-config\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.834549 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-config\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc 
kubenswrapper[4809]: I0930 00:29:50.834556 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.834756 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.834880 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.835531 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.854506 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zqtf\" (UniqueName: \"kubernetes.io/projected/ab502782-7769-460f-8a2f-86d4886ec40a-kube-api-access-5zqtf\") pod \"dnsmasq-dns-84b966f6c9-bdvnx\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:50 crc kubenswrapper[4809]: E0930 00:29:50.922828 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-jkrgs" podUID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.936743 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-ovndb-tls-certs\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.936883 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9qjm\" (UniqueName: \"kubernetes.io/projected/54445277-2cb9-4dad-b1a4-0b0569ef0088-kube-api-access-d9qjm\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.936943 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-config\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.937001 4809 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-httpd-config\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.937038 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-combined-ca-bundle\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.940560 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-combined-ca-bundle\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.944251 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-httpd-config\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.953898 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-config\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.956180 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9qjm\" (UniqueName: \"kubernetes.io/projected/54445277-2cb9-4dad-b1a4-0b0569ef0088-kube-api-access-d9qjm\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:50 crc kubenswrapper[4809]: I0930 00:29:50.969516 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-ovndb-tls-certs\") pod \"neutron-5c58bdbf84-bjfsw\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:51 crc kubenswrapper[4809]: I0930 00:29:51.001858 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:51 crc kubenswrapper[4809]: I0930 00:29:51.086139 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:51 crc kubenswrapper[4809]: E0930 00:29:51.274306 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified" Sep 30 00:29:51 crc kubenswrapper[4809]: E0930 00:29:51.274785 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:heat-db-sync,Image:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,Command:[/bin/bash],Args:[-c /usr/bin/heat-manage --config-dir /etc/heat/heat.conf.d db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/heat/heat.conf.d/00-default.conf,SubPath:00-default.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/heat/heat.conf.d/01-custom.conf,SubPath:01-custom.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zzxz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42418,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42418,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-db-sync-qttpq_openstack(23ff9291-ad89-46a6-9d4a-adf9e545adb2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:29:51 crc kubenswrapper[4809]: E0930 00:29:51.276808 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/heat-db-sync-qttpq" podUID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" Sep 30 00:29:51 crc kubenswrapper[4809]: E0930 00:29:51.667364 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 30 00:29:51 crc kubenswrapper[4809]: E0930 00:29:51.667513 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n57dh84h576h8bh66fh95h696hc4h68bh649h86h5b9h684h8dh545hbh55bh647h98h5dbh54hc7hfh7fh8fh5d9h65ch8bh57ch554h5bfh5fq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bqjgv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(3efcbfdc-c939-452c-96e5-244bc7b7bff3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:29:51 crc kubenswrapper[4809]: E0930 00:29:51.931383 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified\\\"\"" pod="openstack/heat-db-sync-qttpq" podUID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.080669 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-579f9b76b9-drlks"] Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.086202 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.094207 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.094401 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.099298 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-579f9b76b9-drlks"] Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208379 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-ovndb-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208439 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-internal-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208497 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-httpd-config\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208596 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-public-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208623 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-combined-ca-bundle\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208690 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-config\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.208772 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mqtj\" (UniqueName: \"kubernetes.io/projected/05c44209-faa8-4b7d-a127-9a69b13d8e10-kube-api-access-2mqtj\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310398 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-combined-ca-bundle\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310480 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-config\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310534 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mqtj\" (UniqueName: \"kubernetes.io/projected/05c44209-faa8-4b7d-a127-9a69b13d8e10-kube-api-access-2mqtj\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310589 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-ovndb-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310618 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-internal-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310662 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-httpd-config\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.310725 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-public-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.317162 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-httpd-config\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.319052 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-ovndb-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.319345 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-combined-ca-bundle\") pod \"neutron-579f9b76b9-drlks\" (UID: 
\"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.320046 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-internal-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.320290 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-public-tls-certs\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.320413 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/05c44209-faa8-4b7d-a127-9a69b13d8e10-config\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.328855 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mqtj\" (UniqueName: \"kubernetes.io/projected/05c44209-faa8-4b7d-a127-9a69b13d8e10-kube-api-access-2mqtj\") pod \"neutron-579f9b76b9-drlks\" (UID: \"05c44209-faa8-4b7d-a127-9a69b13d8e10\") " pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: E0930 00:29:53.375118 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 30 00:29:53 crc kubenswrapper[4809]: E0930 00:29:53.375810 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sw25b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-qwzxp_openstack(f26e19e3-0ece-4f4d-aa5a-016fbfd929a5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:29:53 crc kubenswrapper[4809]: E0930 00:29:53.377002 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-qwzxp" podUID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.411143 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.734638 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.830990 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-svc\") pod \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.831036 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp65v\" (UniqueName: \"kubernetes.io/projected/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-kube-api-access-rp65v\") pod \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.831139 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-sb\") pod \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.831202 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-config\") pod \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.831223 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-swift-storage-0\") pod \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.831301 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-nb\") pod \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\" (UID: \"ef1a2ce9-936b-4adf-b07a-5790b5915b3d\") " Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.842087 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-kube-api-access-rp65v" (OuterVolumeSpecName: "kube-api-access-rp65v") pod "ef1a2ce9-936b-4adf-b07a-5790b5915b3d" (UID: "ef1a2ce9-936b-4adf-b07a-5790b5915b3d"). InnerVolumeSpecName "kube-api-access-rp65v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.894067 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ef1a2ce9-936b-4adf-b07a-5790b5915b3d" (UID: "ef1a2ce9-936b-4adf-b07a-5790b5915b3d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.905010 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-config" (OuterVolumeSpecName: "config") pod "ef1a2ce9-936b-4adf-b07a-5790b5915b3d" (UID: "ef1a2ce9-936b-4adf-b07a-5790b5915b3d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.918705 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ef1a2ce9-936b-4adf-b07a-5790b5915b3d" (UID: "ef1a2ce9-936b-4adf-b07a-5790b5915b3d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.919832 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ef1a2ce9-936b-4adf-b07a-5790b5915b3d" (UID: "ef1a2ce9-936b-4adf-b07a-5790b5915b3d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.922230 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ef1a2ce9-936b-4adf-b07a-5790b5915b3d" (UID: "ef1a2ce9-936b-4adf-b07a-5790b5915b3d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.946800 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.946835 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.946848 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp65v\" (UniqueName: \"kubernetes.io/projected/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-kube-api-access-rp65v\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.946921 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.946939 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.946950 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1a2ce9-936b-4adf-b07a-5790b5915b3d-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.958019 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q8nkh" event={"ID":"2e2cc1eb-6303-4607-a467-d21cc3c5d25d","Type":"ContainerStarted","Data":"c4b4d496c7b5fc826a057c826cfaf62782016b670bf896f7ae51b9871331c63b"} Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.963906 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" 
event={"ID":"ef1a2ce9-936b-4adf-b07a-5790b5915b3d","Type":"ContainerDied","Data":"f16a969aed27a804e4df2b773d7283c69a179ad1b75fee5f65a4132b11eb7cc5"} Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.963934 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.963966 4809 scope.go:117] "RemoveContainer" containerID="c0107e37ee0d442f160e3fd69f2a7f09be50715d85729d6b4303f2185a6fe709" Sep 30 00:29:53 crc kubenswrapper[4809]: E0930 00:29:53.965031 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-qwzxp" podUID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" Sep 30 00:29:53 crc kubenswrapper[4809]: I0930 00:29:53.986419 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-q8nkh" podStartSLOduration=2.298181256 podStartE2EDuration="30.986397751s" podCreationTimestamp="2025-09-30 00:29:23 +0000 UTC" firstStartedPulling="2025-09-30 00:29:24.656075775 +0000 UTC m=+1215.692325183" lastFinishedPulling="2025-09-30 00:29:53.34429227 +0000 UTC m=+1244.380541678" observedRunningTime="2025-09-30 00:29:53.974776314 +0000 UTC m=+1245.011025722" watchObservedRunningTime="2025-09-30 00:29:53.986397751 +0000 UTC m=+1245.022647159" Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.033539 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v6m7r"] Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.057583 4809 scope.go:117] "RemoveContainer" containerID="938f9567f9f9502227e3745b31bad23d2fd1ff8abda15964aced4de503c54298" Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.059545 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-d9jks"] Sep 30 00:29:54 crc kubenswrapper[4809]: W0930 00:29:54.089059 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod956e79d1_b7da_4b76_8ba6_21dd3838aff1.slice/crio-e1e143294b5493cd789b8a2acd669934c09bfc84bb08f08c63ab10c33954292e WatchSource:0}: Error finding container e1e143294b5493cd789b8a2acd669934c09bfc84bb08f08c63ab10c33954292e: Status 404 returned error can't find the container with id e1e143294b5493cd789b8a2acd669934c09bfc84bb08f08c63ab10c33954292e Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.090269 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-hk2cl"] Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.104144 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-d9jks"] Sep 30 00:29:54 crc kubenswrapper[4809]: W0930 00:29:54.104990 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod736768f4_e809_4c81_879f_f624fcf5c479.slice/crio-650ff01db28040c11c66f2bd1b50e8a9ff81184ec3a18462693003dc2f110ba1 WatchSource:0}: Error finding container 650ff01db28040c11c66f2bd1b50e8a9ff81184ec3a18462693003dc2f110ba1: Status 404 returned error can't find the container with id 650ff01db28040c11c66f2bd1b50e8a9ff81184ec3a18462693003dc2f110ba1 Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.471685 4809 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-bdvnx"] Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.534044 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c58bdbf84-bjfsw"] Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.628540 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-579f9b76b9-drlks"] Sep 30 00:29:54 crc kubenswrapper[4809]: W0930 00:29:54.695467 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54445277_2cb9_4dad_b1a4_0b0569ef0088.slice/crio-75ec8470b0e3238f61f3ec37f1ff751c8568546f034ed1994e38e512208b09f3 WatchSource:0}: Error finding container 75ec8470b0e3238f61f3ec37f1ff751c8568546f034ed1994e38e512208b09f3: Status 404 returned error can't find the container with id 75ec8470b0e3238f61f3ec37f1ff751c8568546f034ed1994e38e512208b09f3 Sep 30 00:29:54 crc kubenswrapper[4809]: W0930 00:29:54.708629 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab502782_7769_460f_8a2f_86d4886ec40a.slice/crio-f7163504a2fc86ed9e4f89724aa04ea5a5279d06b13e90b6f4ea0be6076b945d WatchSource:0}: Error finding container f7163504a2fc86ed9e4f89724aa04ea5a5279d06b13e90b6f4ea0be6076b945d: Status 404 returned error can't find the container with id f7163504a2fc86ed9e4f89724aa04ea5a5279d06b13e90b6f4ea0be6076b945d Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.986890 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v6m7r" event={"ID":"956e79d1-b7da-4b76-8ba6-21dd3838aff1","Type":"ContainerStarted","Data":"e1e143294b5493cd789b8a2acd669934c09bfc84bb08f08c63ab10c33954292e"} Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.992353 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-579f9b76b9-drlks" event={"ID":"05c44209-faa8-4b7d-a127-9a69b13d8e10","Type":"ContainerStarted","Data":"0e4d1ea745bf06d48dd03249e4c50334b5e2c1dbd6eb72a80d243176badc1dd4"} Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.993995 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" event={"ID":"736768f4-e809-4c81-879f-f624fcf5c479","Type":"ContainerStarted","Data":"650ff01db28040c11c66f2bd1b50e8a9ff81184ec3a18462693003dc2f110ba1"} Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.996075 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c58bdbf84-bjfsw" event={"ID":"54445277-2cb9-4dad-b1a4-0b0569ef0088","Type":"ContainerStarted","Data":"75ec8470b0e3238f61f3ec37f1ff751c8568546f034ed1994e38e512208b09f3"} Sep 30 00:29:54 crc kubenswrapper[4809]: I0930 00:29:54.997188 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" event={"ID":"ab502782-7769-460f-8a2f-86d4886ec40a","Type":"ContainerStarted","Data":"f7163504a2fc86ed9e4f89724aa04ea5a5279d06b13e90b6f4ea0be6076b945d"} Sep 30 00:29:55 crc kubenswrapper[4809]: I0930 00:29:55.702439 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" path="/var/lib/kubelet/pods/ef1a2ce9-936b-4adf-b07a-5790b5915b3d/volumes" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.027314 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v6m7r" 
event={"ID":"956e79d1-b7da-4b76-8ba6-21dd3838aff1","Type":"ContainerStarted","Data":"4f5792ddd193c4566d9b946b885da6715b32632d7a3d069d8dc77fa7506830d4"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.031887 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-579f9b76b9-drlks" event={"ID":"05c44209-faa8-4b7d-a127-9a69b13d8e10","Type":"ContainerStarted","Data":"f926caeae4fbfe8a7c56c7fe2f2dafaa2ad19e49265b709d437adf73c90e60d0"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.031916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-579f9b76b9-drlks" event={"ID":"05c44209-faa8-4b7d-a127-9a69b13d8e10","Type":"ContainerStarted","Data":"9ebf95adbfaa5e6c4d74feaf2db67ea481d377f4eaa5f54119cab5ca143bf5ad"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.032798 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.036368 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerStarted","Data":"455d26f111af53890f2941dc4d58fcd120946cf076ba6d2bee17153851548f6a"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.038300 4809 generic.go:334] "Generic (PLEG): container finished" podID="736768f4-e809-4c81-879f-f624fcf5c479" containerID="3c051da2d386eeb843e70e88272d2054e729c358f8717128d6c1fa9bb829f826" exitCode=0 Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.038351 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" event={"ID":"736768f4-e809-4c81-879f-f624fcf5c479","Type":"ContainerDied","Data":"3c051da2d386eeb843e70e88272d2054e729c358f8717128d6c1fa9bb829f826"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.053118 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-v6m7r" podStartSLOduration=21.053102727 podStartE2EDuration="21.053102727s" podCreationTimestamp="2025-09-30 00:29:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:56.049363746 +0000 UTC m=+1247.085613154" watchObservedRunningTime="2025-09-30 00:29:56.053102727 +0000 UTC m=+1247.089352125" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.055227 4809 generic.go:334] "Generic (PLEG): container finished" podID="2e2cc1eb-6303-4607-a467-d21cc3c5d25d" containerID="c4b4d496c7b5fc826a057c826cfaf62782016b670bf896f7ae51b9871331c63b" exitCode=0 Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.055302 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q8nkh" event={"ID":"2e2cc1eb-6303-4607-a467-d21cc3c5d25d","Type":"ContainerDied","Data":"c4b4d496c7b5fc826a057c826cfaf62782016b670bf896f7ae51b9871331c63b"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.094780 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c58bdbf84-bjfsw" event={"ID":"54445277-2cb9-4dad-b1a4-0b0569ef0088","Type":"ContainerStarted","Data":"60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.095713 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c58bdbf84-bjfsw" 
event={"ID":"54445277-2cb9-4dad-b1a4-0b0569ef0088","Type":"ContainerStarted","Data":"1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.096543 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.100741 4809 generic.go:334] "Generic (PLEG): container finished" podID="ab502782-7769-460f-8a2f-86d4886ec40a" containerID="c29b52b7bab2bd667816b074738a33d535361b5debe384c0b591cd5211cd0deb" exitCode=0 Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.101028 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" event={"ID":"ab502782-7769-460f-8a2f-86d4886ec40a","Type":"ContainerDied","Data":"c29b52b7bab2bd667816b074738a33d535361b5debe384c0b591cd5211cd0deb"} Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.116030 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-579f9b76b9-drlks" podStartSLOduration=3.115805265 podStartE2EDuration="3.115805265s" podCreationTimestamp="2025-09-30 00:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:56.067956892 +0000 UTC m=+1247.104206300" watchObservedRunningTime="2025-09-30 00:29:56.115805265 +0000 UTC m=+1247.152054693" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.226259 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5c58bdbf84-bjfsw" podStartSLOduration=6.226240721 podStartE2EDuration="6.226240721s" podCreationTimestamp="2025-09-30 00:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:56.140914398 +0000 UTC m=+1247.177163806" watchObservedRunningTime="2025-09-30 00:29:56.226240721 +0000 UTC m=+1247.262490129" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.630381 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.720745 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-nb\") pod \"736768f4-e809-4c81-879f-f624fcf5c479\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.720823 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-svc\") pod \"736768f4-e809-4c81-879f-f624fcf5c479\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.720870 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-config\") pod \"736768f4-e809-4c81-879f-f624fcf5c479\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.720900 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-swift-storage-0\") pod \"736768f4-e809-4c81-879f-f624fcf5c479\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.720953 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-sb\") pod \"736768f4-e809-4c81-879f-f624fcf5c479\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.721461 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gstvt\" (UniqueName: \"kubernetes.io/projected/736768f4-e809-4c81-879f-f624fcf5c479-kube-api-access-gstvt\") pod \"736768f4-e809-4c81-879f-f624fcf5c479\" (UID: \"736768f4-e809-4c81-879f-f624fcf5c479\") " Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.744869 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/736768f4-e809-4c81-879f-f624fcf5c479-kube-api-access-gstvt" (OuterVolumeSpecName: "kube-api-access-gstvt") pod "736768f4-e809-4c81-879f-f624fcf5c479" (UID: "736768f4-e809-4c81-879f-f624fcf5c479"). InnerVolumeSpecName "kube-api-access-gstvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.745955 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "736768f4-e809-4c81-879f-f624fcf5c479" (UID: "736768f4-e809-4c81-879f-f624fcf5c479"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.746989 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "736768f4-e809-4c81-879f-f624fcf5c479" (UID: "736768f4-e809-4c81-879f-f624fcf5c479"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.754013 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "736768f4-e809-4c81-879f-f624fcf5c479" (UID: "736768f4-e809-4c81-879f-f624fcf5c479"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.760048 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-config" (OuterVolumeSpecName: "config") pod "736768f4-e809-4c81-879f-f624fcf5c479" (UID: "736768f4-e809-4c81-879f-f624fcf5c479"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.761279 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "736768f4-e809-4c81-879f-f624fcf5c479" (UID: "736768f4-e809-4c81-879f-f624fcf5c479"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.824852 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.824882 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.824895 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.824907 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.824920 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/736768f4-e809-4c81-879f-f624fcf5c479-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:56 crc kubenswrapper[4809]: I0930 00:29:56.824934 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gstvt\" (UniqueName: \"kubernetes.io/projected/736768f4-e809-4c81-879f-f624fcf5c479-kube-api-access-gstvt\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.124426 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" event={"ID":"736768f4-e809-4c81-879f-f624fcf5c479","Type":"ContainerDied","Data":"650ff01db28040c11c66f2bd1b50e8a9ff81184ec3a18462693003dc2f110ba1"} Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.124514 4809 scope.go:117] "RemoveContainer" containerID="3c051da2d386eeb843e70e88272d2054e729c358f8717128d6c1fa9bb829f826" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.124616 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-hk2cl" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.128128 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" event={"ID":"ab502782-7769-460f-8a2f-86d4886ec40a","Type":"ContainerStarted","Data":"db7fc80ecb951daceaff02d4e2289f2c318240092d3e9daa10d8fabdab4c79f2"} Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.158505 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" podStartSLOduration=7.158486602 podStartE2EDuration="7.158486602s" podCreationTimestamp="2025-09-30 00:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:29:57.14960319 +0000 UTC m=+1248.185852608" watchObservedRunningTime="2025-09-30 00:29:57.158486602 +0000 UTC m=+1248.194736010" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.214676 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-hk2cl"] Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.228622 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-hk2cl"] Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.646886 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.703278 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="736768f4-e809-4c81-879f-f624fcf5c479" path="/var/lib/kubelet/pods/736768f4-e809-4c81-879f-f624fcf5c479/volumes" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.742726 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-logs\") pod \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.742791 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-config-data\") pod \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.742959 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-combined-ca-bundle\") pod \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.742982 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk4fx\" (UniqueName: \"kubernetes.io/projected/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-kube-api-access-pk4fx\") pod \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.743034 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-scripts\") pod \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\" (UID: \"2e2cc1eb-6303-4607-a467-d21cc3c5d25d\") " Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.743110 4809 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-logs" (OuterVolumeSpecName: "logs") pod "2e2cc1eb-6303-4607-a467-d21cc3c5d25d" (UID: "2e2cc1eb-6303-4607-a467-d21cc3c5d25d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.743454 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.749752 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-kube-api-access-pk4fx" (OuterVolumeSpecName: "kube-api-access-pk4fx") pod "2e2cc1eb-6303-4607-a467-d21cc3c5d25d" (UID: "2e2cc1eb-6303-4607-a467-d21cc3c5d25d"). InnerVolumeSpecName "kube-api-access-pk4fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.750309 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-scripts" (OuterVolumeSpecName: "scripts") pod "2e2cc1eb-6303-4607-a467-d21cc3c5d25d" (UID: "2e2cc1eb-6303-4607-a467-d21cc3c5d25d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.774968 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-config-data" (OuterVolumeSpecName: "config-data") pod "2e2cc1eb-6303-4607-a467-d21cc3c5d25d" (UID: "2e2cc1eb-6303-4607-a467-d21cc3c5d25d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.783589 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e2cc1eb-6303-4607-a467-d21cc3c5d25d" (UID: "2e2cc1eb-6303-4607-a467-d21cc3c5d25d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.845164 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.845484 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk4fx\" (UniqueName: \"kubernetes.io/projected/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-kube-api-access-pk4fx\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.845495 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:57 crc kubenswrapper[4809]: I0930 00:29:57.845504 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e2cc1eb-6303-4607-a467-d21cc3c5d25d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.142715 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q8nkh" event={"ID":"2e2cc1eb-6303-4607-a467-d21cc3c5d25d","Type":"ContainerDied","Data":"c375dead6128511628d167eb7980be80bfbd62510271b4bf9fcc4fbe9335f600"} Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.142760 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c375dead6128511628d167eb7980be80bfbd62510271b4bf9fcc4fbe9335f600" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.142899 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.143027 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-q8nkh" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.371691 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7f6c7c986d-hgzt9"] Sep 30 00:29:58 crc kubenswrapper[4809]: E0930 00:29:58.372109 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="init" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372131 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="init" Sep 30 00:29:58 crc kubenswrapper[4809]: E0930 00:29:58.372153 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e2cc1eb-6303-4607-a467-d21cc3c5d25d" containerName="placement-db-sync" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372160 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e2cc1eb-6303-4607-a467-d21cc3c5d25d" containerName="placement-db-sync" Sep 30 00:29:58 crc kubenswrapper[4809]: E0930 00:29:58.372183 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="736768f4-e809-4c81-879f-f624fcf5c479" containerName="init" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372189 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="736768f4-e809-4c81-879f-f624fcf5c479" containerName="init" Sep 30 00:29:58 crc kubenswrapper[4809]: E0930 00:29:58.372199 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372205 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372377 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372393 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="736768f4-e809-4c81-879f-f624fcf5c479" containerName="init" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.372406 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e2cc1eb-6303-4607-a467-d21cc3c5d25d" containerName="placement-db-sync" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.373505 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.375661 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.375806 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.376031 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.376562 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fnr26" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.378187 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.390857 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7f6c7c986d-hgzt9"] Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.456578 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-config-data\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.456662 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-public-tls-certs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.456747 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-combined-ca-bundle\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.456801 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/945e30a4-7c16-4109-9240-16a3383dc4ba-logs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.457011 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-internal-tls-certs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.457061 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-scripts\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.457119 4809 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wnbz\" (UniqueName: \"kubernetes.io/projected/945e30a4-7c16-4109-9240-16a3383dc4ba-kube-api-access-8wnbz\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.558954 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-internal-tls-certs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.559488 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-scripts\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.559526 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wnbz\" (UniqueName: \"kubernetes.io/projected/945e30a4-7c16-4109-9240-16a3383dc4ba-kube-api-access-8wnbz\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.559550 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-config-data\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.559584 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-public-tls-certs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.559617 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-combined-ca-bundle\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.559703 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/945e30a4-7c16-4109-9240-16a3383dc4ba-logs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.560107 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/945e30a4-7c16-4109-9240-16a3383dc4ba-logs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.564482 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-public-tls-certs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.564820 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-combined-ca-bundle\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.564846 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-internal-tls-certs\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.567261 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-config-data\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.567508 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/945e30a4-7c16-4109-9240-16a3383dc4ba-scripts\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.595203 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wnbz\" (UniqueName: \"kubernetes.io/projected/945e30a4-7c16-4109-9240-16a3383dc4ba-kube-api-access-8wnbz\") pod \"placement-7f6c7c986d-hgzt9\" (UID: \"945e30a4-7c16-4109-9240-16a3383dc4ba\") " pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.687824 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-d9jks" podUID="ef1a2ce9-936b-4adf-b07a-5790b5915b3d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.150:5353: i/o timeout" Sep 30 00:29:58 crc kubenswrapper[4809]: I0930 00:29:58.715998 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:29:59 crc kubenswrapper[4809]: I0930 00:29:59.156908 4809 generic.go:334] "Generic (PLEG): container finished" podID="956e79d1-b7da-4b76-8ba6-21dd3838aff1" containerID="4f5792ddd193c4566d9b946b885da6715b32632d7a3d069d8dc77fa7506830d4" exitCode=0 Sep 30 00:29:59 crc kubenswrapper[4809]: I0930 00:29:59.156985 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v6m7r" event={"ID":"956e79d1-b7da-4b76-8ba6-21dd3838aff1","Type":"ContainerDied","Data":"4f5792ddd193c4566d9b946b885da6715b32632d7a3d069d8dc77fa7506830d4"} Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.142063 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd"] Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.143650 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.145968 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.146144 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.151855 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd"] Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.191237 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d94hj\" (UniqueName: \"kubernetes.io/projected/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-kube-api-access-d94hj\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.191292 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-config-volume\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.191340 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-secret-volume\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.293985 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d94hj\" (UniqueName: \"kubernetes.io/projected/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-kube-api-access-d94hj\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.294055 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-config-volume\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.294112 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-secret-volume\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.294935 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-config-volume\") pod 
\"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.310152 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d94hj\" (UniqueName: \"kubernetes.io/projected/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-kube-api-access-d94hj\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.314845 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-secret-volume\") pod \"collect-profiles-29319870-bz8rd\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:00 crc kubenswrapper[4809]: I0930 00:30:00.477432 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.004119 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.078299 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.091819 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-t9mpd"] Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.092109 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerName="dnsmasq-dns" containerID="cri-o://bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7" gracePeriod=10 Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.190442 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v6m7r" event={"ID":"956e79d1-b7da-4b76-8ba6-21dd3838aff1","Type":"ContainerDied","Data":"e1e143294b5493cd789b8a2acd669934c09bfc84bb08f08c63ab10c33954292e"} Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.190527 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1e143294b5493cd789b8a2acd669934c09bfc84bb08f08c63ab10c33954292e" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.190530 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v6m7r" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.213801 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5cvl\" (UniqueName: \"kubernetes.io/projected/956e79d1-b7da-4b76-8ba6-21dd3838aff1-kube-api-access-d5cvl\") pod \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.213877 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-config-data\") pod \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.214094 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-credential-keys\") pod \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.214274 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-scripts\") pod \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.216290 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-combined-ca-bundle\") pod \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.216340 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-fernet-keys\") pod \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\" (UID: \"956e79d1-b7da-4b76-8ba6-21dd3838aff1\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.232224 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/956e79d1-b7da-4b76-8ba6-21dd3838aff1-kube-api-access-d5cvl" (OuterVolumeSpecName: "kube-api-access-d5cvl") pod "956e79d1-b7da-4b76-8ba6-21dd3838aff1" (UID: "956e79d1-b7da-4b76-8ba6-21dd3838aff1"). InnerVolumeSpecName "kube-api-access-d5cvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.232430 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-scripts" (OuterVolumeSpecName: "scripts") pod "956e79d1-b7da-4b76-8ba6-21dd3838aff1" (UID: "956e79d1-b7da-4b76-8ba6-21dd3838aff1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.233790 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "956e79d1-b7da-4b76-8ba6-21dd3838aff1" (UID: "956e79d1-b7da-4b76-8ba6-21dd3838aff1"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.238858 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "956e79d1-b7da-4b76-8ba6-21dd3838aff1" (UID: "956e79d1-b7da-4b76-8ba6-21dd3838aff1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.300339 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-697fb77f5c-2p8qd"] Sep 30 00:30:01 crc kubenswrapper[4809]: E0930 00:30:01.301129 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956e79d1-b7da-4b76-8ba6-21dd3838aff1" containerName="keystone-bootstrap" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.301152 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="956e79d1-b7da-4b76-8ba6-21dd3838aff1" containerName="keystone-bootstrap" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.301492 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="956e79d1-b7da-4b76-8ba6-21dd3838aff1" containerName="keystone-bootstrap" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.302451 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.311207 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.311340 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.323767 4809 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.323815 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.323827 4809 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.323838 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5cvl\" (UniqueName: \"kubernetes.io/projected/956e79d1-b7da-4b76-8ba6-21dd3838aff1-kube-api-access-d5cvl\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.328606 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-config-data" (OuterVolumeSpecName: "config-data") pod "956e79d1-b7da-4b76-8ba6-21dd3838aff1" (UID: "956e79d1-b7da-4b76-8ba6-21dd3838aff1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.348633 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-697fb77f5c-2p8qd"] Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.377760 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "956e79d1-b7da-4b76-8ba6-21dd3838aff1" (UID: "956e79d1-b7da-4b76-8ba6-21dd3838aff1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425543 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-fernet-keys\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425585 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-public-tls-certs\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425634 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-scripts\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425736 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-internal-tls-certs\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425752 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b979j\" (UniqueName: \"kubernetes.io/projected/7b313c12-f4aa-4511-a071-8e246d12e74f-kube-api-access-b979j\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425772 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-config-data\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425808 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-credential-keys\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.425910 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-combined-ca-bundle\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.426002 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.426015 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e79d1-b7da-4b76-8ba6-21dd3838aff1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.434913 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd"] Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527376 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-internal-tls-certs\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527406 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b979j\" (UniqueName: \"kubernetes.io/projected/7b313c12-f4aa-4511-a071-8e246d12e74f-kube-api-access-b979j\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527462 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-config-data\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527487 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-credential-keys\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527505 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-combined-ca-bundle\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527593 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-fernet-keys\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527612 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-public-tls-certs\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.527690 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-scripts\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.532601 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-scripts\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.543399 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-credential-keys\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.543729 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-fernet-keys\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.554023 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-internal-tls-certs\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.554172 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-combined-ca-bundle\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.554203 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-public-tls-certs\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.556356 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b979j\" (UniqueName: \"kubernetes.io/projected/7b313c12-f4aa-4511-a071-8e246d12e74f-kube-api-access-b979j\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.556496 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b313c12-f4aa-4511-a071-8e246d12e74f-config-data\") pod \"keystone-697fb77f5c-2p8qd\" (UID: \"7b313c12-f4aa-4511-a071-8e246d12e74f\") " 
pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.736428 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7f6c7c986d-hgzt9"] Sep 30 00:30:01 crc kubenswrapper[4809]: W0930 00:30:01.748172 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod945e30a4_7c16_4109_9240_16a3383dc4ba.slice/crio-2b051e5b4cd33afe5e808231f65618fdccb7c03c3d719db69fd6f8850fc703b3 WatchSource:0}: Error finding container 2b051e5b4cd33afe5e808231f65618fdccb7c03c3d719db69fd6f8850fc703b3: Status 404 returned error can't find the container with id 2b051e5b4cd33afe5e808231f65618fdccb7c03c3d719db69fd6f8850fc703b3 Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.770274 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.787070 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.833412 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-nb\") pod \"c63835cb-f0af-438b-8213-da9cfb939cf9\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.833489 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-sb\") pod \"c63835cb-f0af-438b-8213-da9cfb939cf9\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.833528 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-svc\") pod \"c63835cb-f0af-438b-8213-da9cfb939cf9\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.833548 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-config\") pod \"c63835cb-f0af-438b-8213-da9cfb939cf9\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.833598 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txt7k\" (UniqueName: \"kubernetes.io/projected/c63835cb-f0af-438b-8213-da9cfb939cf9-kube-api-access-txt7k\") pod \"c63835cb-f0af-438b-8213-da9cfb939cf9\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.833670 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-swift-storage-0\") pod \"c63835cb-f0af-438b-8213-da9cfb939cf9\" (UID: \"c63835cb-f0af-438b-8213-da9cfb939cf9\") " Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.845873 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c63835cb-f0af-438b-8213-da9cfb939cf9-kube-api-access-txt7k" (OuterVolumeSpecName: "kube-api-access-txt7k") pod "c63835cb-f0af-438b-8213-da9cfb939cf9" (UID: 
"c63835cb-f0af-438b-8213-da9cfb939cf9"). InnerVolumeSpecName "kube-api-access-txt7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.942718 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c63835cb-f0af-438b-8213-da9cfb939cf9" (UID: "c63835cb-f0af-438b-8213-da9cfb939cf9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.943871 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txt7k\" (UniqueName: \"kubernetes.io/projected/c63835cb-f0af-438b-8213-da9cfb939cf9-kube-api-access-txt7k\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.943900 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.963225 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c63835cb-f0af-438b-8213-da9cfb939cf9" (UID: "c63835cb-f0af-438b-8213-da9cfb939cf9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.967279 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c63835cb-f0af-438b-8213-da9cfb939cf9" (UID: "c63835cb-f0af-438b-8213-da9cfb939cf9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.968152 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c63835cb-f0af-438b-8213-da9cfb939cf9" (UID: "c63835cb-f0af-438b-8213-da9cfb939cf9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:01 crc kubenswrapper[4809]: I0930 00:30:01.980448 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-config" (OuterVolumeSpecName: "config") pod "c63835cb-f0af-438b-8213-da9cfb939cf9" (UID: "c63835cb-f0af-438b-8213-da9cfb939cf9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.045073 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.045100 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.045109 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.045120 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c63835cb-f0af-438b-8213-da9cfb939cf9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.245955 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerStarted","Data":"3b72172354d849e8eef7780aa60b2ab13a76af71b6590602e03d7a9fe8d132e2"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.256715 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" event={"ID":"6c55f706-cdfa-49e9-8c5a-cf953606bd4d","Type":"ContainerStarted","Data":"fcf4162525b4c58078f67fda805fbdee76aa6aea7e6d1012a372e58dd7e6867f"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.256915 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" event={"ID":"6c55f706-cdfa-49e9-8c5a-cf953606bd4d","Type":"ContainerStarted","Data":"d96bb754d17a4839127eed926919f516f72373a586b4c3ef12331af86560b332"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.268097 4809 generic.go:334] "Generic (PLEG): container finished" podID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerID="bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7" exitCode=0 Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.268156 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" event={"ID":"c63835cb-f0af-438b-8213-da9cfb939cf9","Type":"ContainerDied","Data":"bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.268180 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" event={"ID":"c63835cb-f0af-438b-8213-da9cfb939cf9","Type":"ContainerDied","Data":"27d21cff35877d0eadcb2fc9d1b0e6e655ea67ae438f782cda26be4dadcb4354"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.268197 4809 scope.go:117] "RemoveContainer" containerID="bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.268306 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-t9mpd" Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.281351 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f6c7c986d-hgzt9" event={"ID":"945e30a4-7c16-4109-9240-16a3383dc4ba","Type":"ContainerStarted","Data":"f54b9d74e7b0a7c214a3e870325b4ef7be85e686862039503f27fc3507b7b4c3"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.281394 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f6c7c986d-hgzt9" event={"ID":"945e30a4-7c16-4109-9240-16a3383dc4ba","Type":"ContainerStarted","Data":"2b051e5b4cd33afe5e808231f65618fdccb7c03c3d719db69fd6f8850fc703b3"} Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.318144 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-t9mpd"] Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.328826 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-t9mpd"] Sep 30 00:30:02 crc kubenswrapper[4809]: I0930 00:30:02.387742 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-697fb77f5c-2p8qd"] Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.291049 4809 generic.go:334] "Generic (PLEG): container finished" podID="6c55f706-cdfa-49e9-8c5a-cf953606bd4d" containerID="fcf4162525b4c58078f67fda805fbdee76aa6aea7e6d1012a372e58dd7e6867f" exitCode=0 Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.291098 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" event={"ID":"6c55f706-cdfa-49e9-8c5a-cf953606bd4d","Type":"ContainerDied","Data":"fcf4162525b4c58078f67fda805fbdee76aa6aea7e6d1012a372e58dd7e6867f"} Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.319929 4809 scope.go:117] "RemoveContainer" containerID="c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.523724 4809 scope.go:117] "RemoveContainer" containerID="bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7" Sep 30 00:30:03 crc kubenswrapper[4809]: E0930 00:30:03.525403 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7\": container with ID starting with bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7 not found: ID does not exist" containerID="bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.525436 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7"} err="failed to get container status \"bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7\": rpc error: code = NotFound desc = could not find container \"bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7\": container with ID starting with bade045d10c87a3d7585632fb8a94b37a23de259c14108a62beed46221f44bc7 not found: ID does not exist" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.525456 4809 scope.go:117] "RemoveContainer" containerID="c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf" Sep 30 00:30:03 crc kubenswrapper[4809]: E0930 00:30:03.526094 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf\": container with ID starting with c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf not found: ID does not exist" containerID="c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.526117 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf"} err="failed to get container status \"c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf\": rpc error: code = NotFound desc = could not find container \"c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf\": container with ID starting with c87fb27f4cdfffbabf12813ec03b38c6baebb046bea16bf668c792bd3d807aaf not found: ID does not exist" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.671193 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.707125 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" path="/var/lib/kubelet/pods/c63835cb-f0af-438b-8213-da9cfb939cf9/volumes" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.775634 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-config-volume\") pod \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.775903 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-secret-volume\") pod \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.775958 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d94hj\" (UniqueName: \"kubernetes.io/projected/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-kube-api-access-d94hj\") pod \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\" (UID: \"6c55f706-cdfa-49e9-8c5a-cf953606bd4d\") " Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.776346 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-config-volume" (OuterVolumeSpecName: "config-volume") pod "6c55f706-cdfa-49e9-8c5a-cf953606bd4d" (UID: "6c55f706-cdfa-49e9-8c5a-cf953606bd4d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.780075 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6c55f706-cdfa-49e9-8c5a-cf953606bd4d" (UID: "6c55f706-cdfa-49e9-8c5a-cf953606bd4d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.781059 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-kube-api-access-d94hj" (OuterVolumeSpecName: "kube-api-access-d94hj") pod "6c55f706-cdfa-49e9-8c5a-cf953606bd4d" (UID: "6c55f706-cdfa-49e9-8c5a-cf953606bd4d"). InnerVolumeSpecName "kube-api-access-d94hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.878005 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.878038 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d94hj\" (UniqueName: \"kubernetes.io/projected/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-kube-api-access-d94hj\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:03 crc kubenswrapper[4809]: I0930 00:30:03.878047 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6c55f706-cdfa-49e9-8c5a-cf953606bd4d-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.305523 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7f6c7c986d-hgzt9" event={"ID":"945e30a4-7c16-4109-9240-16a3383dc4ba","Type":"ContainerStarted","Data":"4438562ea6c2b785bdd3384ccd9b6f043277f5e181044086b1bf7f3bdba6434b"} Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.306987 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.307023 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.322482 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.322476 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd" event={"ID":"6c55f706-cdfa-49e9-8c5a-cf953606bd4d","Type":"ContainerDied","Data":"d96bb754d17a4839127eed926919f516f72373a586b4c3ef12331af86560b332"} Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.322618 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d96bb754d17a4839127eed926919f516f72373a586b4c3ef12331af86560b332" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.325630 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-697fb77f5c-2p8qd" event={"ID":"7b313c12-f4aa-4511-a071-8e246d12e74f","Type":"ContainerStarted","Data":"ce310cc6e10ed11c358720a3282292cfc33624bd6c70fb04f3896e6919af1ed8"} Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.325682 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-697fb77f5c-2p8qd" event={"ID":"7b313c12-f4aa-4511-a071-8e246d12e74f","Type":"ContainerStarted","Data":"eef9351a2ac783d562d0f08d4c3505572b9a35970a199407e830fac3e0b25231"} Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.325817 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.343524 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7f6c7c986d-hgzt9" podStartSLOduration=6.3435021240000005 podStartE2EDuration="6.343502124s" podCreationTimestamp="2025-09-30 00:29:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:04.333616644 +0000 UTC m=+1255.369866062" watchObservedRunningTime="2025-09-30 00:30:04.343502124 +0000 UTC m=+1255.379751532" Sep 30 00:30:04 crc kubenswrapper[4809]: I0930 00:30:04.377541 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-697fb77f5c-2p8qd" podStartSLOduration=3.37752061 podStartE2EDuration="3.37752061s" podCreationTimestamp="2025-09-30 00:30:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:04.352487288 +0000 UTC m=+1255.388736696" watchObservedRunningTime="2025-09-30 00:30:04.37752061 +0000 UTC m=+1255.413770018" Sep 30 00:30:05 crc kubenswrapper[4809]: I0930 00:30:05.337667 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jkrgs" event={"ID":"a1ccba93-c72a-4aea-b972-c54bbe6bfc99","Type":"ContainerStarted","Data":"613fad92757ab6cf194a11100d250e3170421fe74f47b479543d4b633584a052"} Sep 30 00:30:05 crc kubenswrapper[4809]: I0930 00:30:05.363534 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jkrgs" podStartSLOduration=2.7572511950000003 podStartE2EDuration="42.363503754s" podCreationTimestamp="2025-09-30 00:29:23 +0000 UTC" firstStartedPulling="2025-09-30 00:29:24.657340059 +0000 UTC m=+1215.693589467" lastFinishedPulling="2025-09-30 00:30:04.263592618 +0000 UTC m=+1255.299842026" observedRunningTime="2025-09-30 00:30:05.357694016 +0000 UTC m=+1256.393943434" watchObservedRunningTime="2025-09-30 00:30:05.363503754 +0000 UTC m=+1256.399753172" Sep 30 00:30:07 crc 
kubenswrapper[4809]: I0930 00:30:07.361951 4809 generic.go:334] "Generic (PLEG): container finished" podID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" containerID="613fad92757ab6cf194a11100d250e3170421fe74f47b479543d4b633584a052" exitCode=0 Sep 30 00:30:07 crc kubenswrapper[4809]: I0930 00:30:07.362066 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jkrgs" event={"ID":"a1ccba93-c72a-4aea-b972-c54bbe6bfc99","Type":"ContainerDied","Data":"613fad92757ab6cf194a11100d250e3170421fe74f47b479543d4b633584a052"} Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.193520 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.304375 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-db-sync-config-data\") pod \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.304680 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m75gn\" (UniqueName: \"kubernetes.io/projected/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-kube-api-access-m75gn\") pod \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.304848 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-combined-ca-bundle\") pod \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\" (UID: \"a1ccba93-c72a-4aea-b972-c54bbe6bfc99\") " Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.309080 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-kube-api-access-m75gn" (OuterVolumeSpecName: "kube-api-access-m75gn") pod "a1ccba93-c72a-4aea-b972-c54bbe6bfc99" (UID: "a1ccba93-c72a-4aea-b972-c54bbe6bfc99"). InnerVolumeSpecName "kube-api-access-m75gn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.309408 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a1ccba93-c72a-4aea-b972-c54bbe6bfc99" (UID: "a1ccba93-c72a-4aea-b972-c54bbe6bfc99"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.347061 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1ccba93-c72a-4aea-b972-c54bbe6bfc99" (UID: "a1ccba93-c72a-4aea-b972-c54bbe6bfc99"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.397406 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qttpq" event={"ID":"23ff9291-ad89-46a6-9d4a-adf9e545adb2","Type":"ContainerStarted","Data":"8b626b4d4e767700ac33232df90ec4d60f87c18ee0c45a30f5c6c8ddb44d31a0"} Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.401668 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jkrgs" event={"ID":"a1ccba93-c72a-4aea-b972-c54bbe6bfc99","Type":"ContainerDied","Data":"bf870fddcc14a3252216193326aa5bab4a44725734b7f853ebc61c342ab9c027"} Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.401713 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf870fddcc14a3252216193326aa5bab4a44725734b7f853ebc61c342ab9c027" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.401800 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jkrgs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.408270 4809 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.408310 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m75gn\" (UniqueName: \"kubernetes.io/projected/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-kube-api-access-m75gn\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.408325 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ccba93-c72a-4aea-b972-c54bbe6bfc99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.429931 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-qttpq" podStartSLOduration=6.062918483 podStartE2EDuration="47.429914971s" podCreationTimestamp="2025-09-30 00:29:22 +0000 UTC" firstStartedPulling="2025-09-30 00:29:24.065840213 +0000 UTC m=+1215.102089621" lastFinishedPulling="2025-09-30 00:30:05.432836701 +0000 UTC m=+1256.469086109" observedRunningTime="2025-09-30 00:30:09.415524079 +0000 UTC m=+1260.451773487" watchObservedRunningTime="2025-09-30 00:30:09.429914971 +0000 UTC m=+1260.466164369" Sep 30 00:30:09 crc kubenswrapper[4809]: E0930 00:30:09.450507 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.613793 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6b8d67c9f-x6rts"] Sep 30 00:30:09 crc kubenswrapper[4809]: E0930 00:30:09.614225 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c55f706-cdfa-49e9-8c5a-cf953606bd4d" containerName="collect-profiles" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614237 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c55f706-cdfa-49e9-8c5a-cf953606bd4d" containerName="collect-profiles" Sep 30 00:30:09 crc kubenswrapper[4809]: E0930 00:30:09.614250 4809 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerName="init" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614255 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerName="init" Sep 30 00:30:09 crc kubenswrapper[4809]: E0930 00:30:09.614265 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerName="dnsmasq-dns" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614272 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerName="dnsmasq-dns" Sep 30 00:30:09 crc kubenswrapper[4809]: E0930 00:30:09.614289 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" containerName="barbican-db-sync" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614295 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" containerName="barbican-db-sync" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614495 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c55f706-cdfa-49e9-8c5a-cf953606bd4d" containerName="collect-profiles" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614514 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" containerName="barbican-db-sync" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.614530 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c63835cb-f0af-438b-8213-da9cfb939cf9" containerName="dnsmasq-dns" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.615968 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.620349 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-l58q9" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.620554 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.620707 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.651231 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b8d67c9f-x6rts"] Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.716665 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-config-data\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.717144 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e2596d-d7a4-46ea-ab20-054084e61605-logs\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.717166 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdk8j\" (UniqueName: 
\"kubernetes.io/projected/08e2596d-d7a4-46ea-ab20-054084e61605-kube-api-access-mdk8j\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.717204 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-config-data-custom\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.717224 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-combined-ca-bundle\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.770338 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-764b94dc8b-rsfhs"] Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.772015 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.781350 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.832615 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-config-data\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.832696 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e2596d-d7a4-46ea-ab20-054084e61605-logs\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.832734 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdk8j\" (UniqueName: \"kubernetes.io/projected/08e2596d-d7a4-46ea-ab20-054084e61605-kube-api-access-mdk8j\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.832768 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-config-data-custom\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.832786 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-combined-ca-bundle\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " 
pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.836119 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e2596d-d7a4-46ea-ab20-054084e61605-logs\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.839384 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.840092 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.852602 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-config-data\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.862179 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-config-data-custom\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.863928 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e2596d-d7a4-46ea-ab20-054084e61605-combined-ca-bundle\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.864634 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-764b94dc8b-rsfhs"] Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.866001 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdk8j\" (UniqueName: \"kubernetes.io/projected/08e2596d-d7a4-46ea-ab20-054084e61605-kube-api-access-mdk8j\") pod \"barbican-worker-6b8d67c9f-x6rts\" (UID: \"08e2596d-d7a4-46ea-ab20-054084e61605\") " pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.888542 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-slgbb"] Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.890133 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.920528 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-slgbb"] Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.933881 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6b8bf7879d-9q9nt"] Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.935293 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvtvs\" (UniqueName: \"kubernetes.io/projected/69f247bc-7b09-4e36-b749-0889a206162c-kube-api-access-bvtvs\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.935501 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69f247bc-7b09-4e36-b749-0889a206162c-logs\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.935636 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-combined-ca-bundle\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.935777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-config-data\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.935943 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-config-data-custom\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.935391 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.938132 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 00:30:09 crc kubenswrapper[4809]: I0930 00:30:09.999782 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b8bf7879d-9q9nt"] Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.002597 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-l58q9" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.004520 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b8d67c9f-x6rts" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038096 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038367 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69f247bc-7b09-4e36-b749-0889a206162c-logs\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038421 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-combined-ca-bundle\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038448 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-config\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038482 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038504 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-config-data\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038550 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038580 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-combined-ca-bundle\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038633 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-config-data-custom\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038685 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c474g\" (UniqueName: \"kubernetes.io/projected/92f70fbe-b165-4384-a087-4f6afd932cf1-kube-api-access-c474g\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038717 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data-custom\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038754 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3c852e4-ed87-4a74-9660-fcd02babe2e5-logs\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038794 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkwcj\" (UniqueName: \"kubernetes.io/projected/f3c852e4-ed87-4a74-9660-fcd02babe2e5-kube-api-access-bkwcj\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038822 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038847 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvtvs\" (UniqueName: \"kubernetes.io/projected/69f247bc-7b09-4e36-b749-0889a206162c-kube-api-access-bvtvs\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.038846 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69f247bc-7b09-4e36-b749-0889a206162c-logs\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.039342 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc 
kubenswrapper[4809]: I0930 00:30:10.048183 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-combined-ca-bundle\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.048672 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-config-data-custom\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.056682 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvtvs\" (UniqueName: \"kubernetes.io/projected/69f247bc-7b09-4e36-b749-0889a206162c-kube-api-access-bvtvs\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.058432 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69f247bc-7b09-4e36-b749-0889a206162c-config-data\") pod \"barbican-keystone-listener-764b94dc8b-rsfhs\" (UID: \"69f247bc-7b09-4e36-b749-0889a206162c\") " pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.114841 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141157 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-config\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141204 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141240 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141260 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-combined-ca-bundle\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141299 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c474g\" 
(UniqueName: \"kubernetes.io/projected/92f70fbe-b165-4384-a087-4f6afd932cf1-kube-api-access-c474g\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141320 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data-custom\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141348 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3c852e4-ed87-4a74-9660-fcd02babe2e5-logs\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141374 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkwcj\" (UniqueName: \"kubernetes.io/projected/f3c852e4-ed87-4a74-9660-fcd02babe2e5-kube-api-access-bkwcj\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141389 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141420 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.141475 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.142276 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-config\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.142299 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.142899 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.143124 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3c852e4-ed87-4a74-9660-fcd02babe2e5-logs\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.143617 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.145059 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.147427 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.153709 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-combined-ca-bundle\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.154284 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data-custom\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.160621 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkwcj\" (UniqueName: \"kubernetes.io/projected/f3c852e4-ed87-4a74-9660-fcd02babe2e5-kube-api-access-bkwcj\") pod \"barbican-api-6b8bf7879d-9q9nt\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.163443 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c474g\" (UniqueName: \"kubernetes.io/projected/92f70fbe-b165-4384-a087-4f6afd932cf1-kube-api-access-c474g\") pod \"dnsmasq-dns-75c8ddd69c-slgbb\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.282358 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.286415 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.443499 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerStarted","Data":"d89c045cde6d248f1d79376e899ec9ad392128e24909600924f9d6835e17be01"} Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.444104 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.444020 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="proxy-httpd" containerID="cri-o://d89c045cde6d248f1d79376e899ec9ad392128e24909600924f9d6835e17be01" gracePeriod=30 Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.443729 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="ceilometer-notification-agent" containerID="cri-o://455d26f111af53890f2941dc4d58fcd120946cf076ba6d2bee17153851548f6a" gracePeriod=30 Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.444027 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="sg-core" containerID="cri-o://3b72172354d849e8eef7780aa60b2ab13a76af71b6590602e03d7a9fe8d132e2" gracePeriod=30 Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.478516 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qwzxp" event={"ID":"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5","Type":"ContainerStarted","Data":"15bcf4a1213bb2857aaa2d46c105a7cf69375731605bc128731ea2b59f3e50f3"} Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.514994 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-qwzxp" podStartSLOduration=4.08759012 podStartE2EDuration="48.514975293s" podCreationTimestamp="2025-09-30 00:29:22 +0000 UTC" firstStartedPulling="2025-09-30 00:29:24.753789777 +0000 UTC m=+1215.790039185" lastFinishedPulling="2025-09-30 00:30:09.18117495 +0000 UTC m=+1260.217424358" observedRunningTime="2025-09-30 00:30:10.498297679 +0000 UTC m=+1261.534547087" watchObservedRunningTime="2025-09-30 00:30:10.514975293 +0000 UTC m=+1261.551224701" Sep 30 00:30:10 crc kubenswrapper[4809]: W0930 00:30:10.564739 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08e2596d_d7a4_46ea_ab20_054084e61605.slice/crio-34be594e659606fb1a8c97e7eb42fcc72912577bc66b423a62f254c80da1c7a5 WatchSource:0}: Error finding container 34be594e659606fb1a8c97e7eb42fcc72912577bc66b423a62f254c80da1c7a5: Status 404 returned error can't find the container with id 34be594e659606fb1a8c97e7eb42fcc72912577bc66b423a62f254c80da1c7a5 Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.587976 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b8d67c9f-x6rts"] Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.693093 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-keystone-listener-764b94dc8b-rsfhs"] Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.905654 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b8bf7879d-9q9nt"] Sep 30 00:30:10 crc kubenswrapper[4809]: I0930 00:30:10.929733 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-slgbb"] Sep 30 00:30:10 crc kubenswrapper[4809]: W0930 00:30:10.940925 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92f70fbe_b165_4384_a087_4f6afd932cf1.slice/crio-719714a164cd4de5f2d1128c16fc03923d6c1de1f2cd73f81498ae38911e18af WatchSource:0}: Error finding container 719714a164cd4de5f2d1128c16fc03923d6c1de1f2cd73f81498ae38911e18af: Status 404 returned error can't find the container with id 719714a164cd4de5f2d1128c16fc03923d6c1de1f2cd73f81498ae38911e18af Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.495549 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b8bf7879d-9q9nt" event={"ID":"f3c852e4-ed87-4a74-9660-fcd02babe2e5","Type":"ContainerStarted","Data":"8142e13ebd8d44e9d10909b902df5281cea7dc9bfd55c48d4c6b1fac3c577db7"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.496830 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b8bf7879d-9q9nt" event={"ID":"f3c852e4-ed87-4a74-9660-fcd02babe2e5","Type":"ContainerStarted","Data":"d115a77a59d1cf73b28846d906ab5b615a72a6d081d62b968741ece41b5e17f5"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.496866 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b8bf7879d-9q9nt" event={"ID":"f3c852e4-ed87-4a74-9660-fcd02babe2e5","Type":"ContainerStarted","Data":"d98e84ad669890b487383e1572dd35bacb122f99d76fee689ab315ebdcb579f0"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.496940 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.496958 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.507589 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8d67c9f-x6rts" event={"ID":"08e2596d-d7a4-46ea-ab20-054084e61605","Type":"ContainerStarted","Data":"34be594e659606fb1a8c97e7eb42fcc72912577bc66b423a62f254c80da1c7a5"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.512433 4809 generic.go:334] "Generic (PLEG): container finished" podID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerID="9795c7ddf97ff33a3362f43ff48975ee4d71f34c0efbd0827cd79aea5ac4f24d" exitCode=0 Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.512528 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" event={"ID":"92f70fbe-b165-4384-a087-4f6afd932cf1","Type":"ContainerDied","Data":"9795c7ddf97ff33a3362f43ff48975ee4d71f34c0efbd0827cd79aea5ac4f24d"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.512557 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" event={"ID":"92f70fbe-b165-4384-a087-4f6afd932cf1","Type":"ContainerStarted","Data":"719714a164cd4de5f2d1128c16fc03923d6c1de1f2cd73f81498ae38911e18af"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.517693 4809 generic.go:334] "Generic (PLEG): container 
finished" podID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerID="d89c045cde6d248f1d79376e899ec9ad392128e24909600924f9d6835e17be01" exitCode=0 Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.517716 4809 generic.go:334] "Generic (PLEG): container finished" podID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerID="3b72172354d849e8eef7780aa60b2ab13a76af71b6590602e03d7a9fe8d132e2" exitCode=2 Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.517748 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerDied","Data":"d89c045cde6d248f1d79376e899ec9ad392128e24909600924f9d6835e17be01"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.517771 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerDied","Data":"3b72172354d849e8eef7780aa60b2ab13a76af71b6590602e03d7a9fe8d132e2"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.521584 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" event={"ID":"69f247bc-7b09-4e36-b749-0889a206162c","Type":"ContainerStarted","Data":"602448f48355df3841646efd36f09d89c85064610bf95e5d791ea7ede9c99b99"} Sep 30 00:30:11 crc kubenswrapper[4809]: I0930 00:30:11.531014 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6b8bf7879d-9q9nt" podStartSLOduration=2.530997773 podStartE2EDuration="2.530997773s" podCreationTimestamp="2025-09-30 00:30:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:11.51948438 +0000 UTC m=+1262.555733808" watchObservedRunningTime="2025-09-30 00:30:11.530997773 +0000 UTC m=+1262.567247181" Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.531485 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" event={"ID":"92f70fbe-b165-4384-a087-4f6afd932cf1","Type":"ContainerStarted","Data":"82f7460e719e9f9f2c48cf62fb0daffac2fbbcf44197035708167cbef7ad0678"} Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.531946 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.867068 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" podStartSLOduration=3.867044747 podStartE2EDuration="3.867044747s" podCreationTimestamp="2025-09-30 00:30:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:12.560361528 +0000 UTC m=+1263.596610946" watchObservedRunningTime="2025-09-30 00:30:12.867044747 +0000 UTC m=+1263.903294165" Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.870202 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6c8df69b5b-8qc6t"] Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.872058 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.879004 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.879065 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 30 00:30:12 crc kubenswrapper[4809]: I0930 00:30:12.888651 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6c8df69b5b-8qc6t"] Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.009918 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-combined-ca-bundle\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.009962 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjld4\" (UniqueName: \"kubernetes.io/projected/99531336-93a0-4d03-a774-923e22900476-kube-api-access-cjld4\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.010003 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99531336-93a0-4d03-a774-923e22900476-logs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.010078 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-internal-tls-certs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.010105 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-public-tls-certs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.010132 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-config-data-custom\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.010185 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-config-data\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113352 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99531336-93a0-4d03-a774-923e22900476-logs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113496 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-internal-tls-certs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113534 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-public-tls-certs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113574 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-config-data-custom\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113719 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-config-data\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113821 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-combined-ca-bundle\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.113852 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjld4\" (UniqueName: \"kubernetes.io/projected/99531336-93a0-4d03-a774-923e22900476-kube-api-access-cjld4\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.114732 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99531336-93a0-4d03-a774-923e22900476-logs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.121112 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-config-data\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.121571 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-config-data-custom\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.125329 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-combined-ca-bundle\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.128962 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-public-tls-certs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.131696 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/99531336-93a0-4d03-a774-923e22900476-internal-tls-certs\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.140255 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjld4\" (UniqueName: \"kubernetes.io/projected/99531336-93a0-4d03-a774-923e22900476-kube-api-access-cjld4\") pod \"barbican-api-6c8df69b5b-8qc6t\" (UID: \"99531336-93a0-4d03-a774-923e22900476\") " pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.215343 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.566245 4809 generic.go:334] "Generic (PLEG): container finished" podID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerID="455d26f111af53890f2941dc4d58fcd120946cf076ba6d2bee17153851548f6a" exitCode=0 Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.566338 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerDied","Data":"455d26f111af53890f2941dc4d58fcd120946cf076ba6d2bee17153851548f6a"} Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.672764 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727012 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-config-data\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727135 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-log-httpd\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727246 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-sg-core-conf-yaml\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727278 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-combined-ca-bundle\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727305 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-scripts\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727325 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqjgv\" (UniqueName: \"kubernetes.io/projected/3efcbfdc-c939-452c-96e5-244bc7b7bff3-kube-api-access-bqjgv\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.727403 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-run-httpd\") pod \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\" (UID: \"3efcbfdc-c939-452c-96e5-244bc7b7bff3\") " Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.728041 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.735016 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.742193 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-scripts" (OuterVolumeSpecName: "scripts") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.748616 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3efcbfdc-c939-452c-96e5-244bc7b7bff3-kube-api-access-bqjgv" (OuterVolumeSpecName: "kube-api-access-bqjgv") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "kube-api-access-bqjgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.780371 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.829665 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.829691 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.829703 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqjgv\" (UniqueName: \"kubernetes.io/projected/3efcbfdc-c939-452c-96e5-244bc7b7bff3-kube-api-access-bqjgv\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.829712 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.829720 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3efcbfdc-c939-452c-96e5-244bc7b7bff3-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.830727 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.843302 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6c8df69b5b-8qc6t"] Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.847760 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-config-data" (OuterVolumeSpecName: "config-data") pod "3efcbfdc-c939-452c-96e5-244bc7b7bff3" (UID: "3efcbfdc-c939-452c-96e5-244bc7b7bff3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.931980 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:13 crc kubenswrapper[4809]: I0930 00:30:13.932017 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efcbfdc-c939-452c-96e5-244bc7b7bff3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.577497 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3efcbfdc-c939-452c-96e5-244bc7b7bff3","Type":"ContainerDied","Data":"9840dec362e5197a35ce62ed481e7519936bbb9f14f8934b7b467335949b028a"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.577527 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.577843 4809 scope.go:117] "RemoveContainer" containerID="d89c045cde6d248f1d79376e899ec9ad392128e24909600924f9d6835e17be01" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.587910 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" event={"ID":"69f247bc-7b09-4e36-b749-0889a206162c","Type":"ContainerStarted","Data":"9223545c6d6a9991e5ccd8a6e66de27c517fd239607b0e646cb825f953eb1913"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.587962 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" event={"ID":"69f247bc-7b09-4e36-b749-0889a206162c","Type":"ContainerStarted","Data":"3f076cc9f0fdda3807cde1049d59623a806bcb96792ae9edce465a89118842c5"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.593400 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8df69b5b-8qc6t" event={"ID":"99531336-93a0-4d03-a774-923e22900476","Type":"ContainerStarted","Data":"7c76d486b4deb608462ebbdb35b634d0883d3939564a6a234e920f6d90073b81"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.593451 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.593465 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8df69b5b-8qc6t" event={"ID":"99531336-93a0-4d03-a774-923e22900476","Type":"ContainerStarted","Data":"718615766ed282a8f80c5de2633046781c77cb765ba3a89d49c4981c782e80ef"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.593477 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8df69b5b-8qc6t" 
event={"ID":"99531336-93a0-4d03-a774-923e22900476","Type":"ContainerStarted","Data":"2e7d785489161332247c428634b7bcc56978fd5bd95c95bc86f4db1f69f8d95d"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.593521 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.595937 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8d67c9f-x6rts" event={"ID":"08e2596d-d7a4-46ea-ab20-054084e61605","Type":"ContainerStarted","Data":"bb63a043e80a2784345d87d66a375517e66e48c6cb55bf05b9c53b8a96f77374"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.595973 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8d67c9f-x6rts" event={"ID":"08e2596d-d7a4-46ea-ab20-054084e61605","Type":"ContainerStarted","Data":"1e0bef1d8312dd788e4a31556dfd22efcde803b347301ac0eec49410a96c9035"} Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.608042 4809 scope.go:117] "RemoveContainer" containerID="3b72172354d849e8eef7780aa60b2ab13a76af71b6590602e03d7a9fe8d132e2" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.608528 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-764b94dc8b-rsfhs" podStartSLOduration=2.989356123 podStartE2EDuration="5.608515439s" podCreationTimestamp="2025-09-30 00:30:09 +0000 UTC" firstStartedPulling="2025-09-30 00:30:10.721834984 +0000 UTC m=+1261.758084392" lastFinishedPulling="2025-09-30 00:30:13.3409943 +0000 UTC m=+1264.377243708" observedRunningTime="2025-09-30 00:30:14.606870604 +0000 UTC m=+1265.643120012" watchObservedRunningTime="2025-09-30 00:30:14.608515439 +0000 UTC m=+1265.644764847" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.643240 4809 scope.go:117] "RemoveContainer" containerID="455d26f111af53890f2941dc4d58fcd120946cf076ba6d2bee17153851548f6a" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.665693 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.684342 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.690278 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6b8d67c9f-x6rts" podStartSLOduration=2.929170733 podStartE2EDuration="5.690255474s" podCreationTimestamp="2025-09-30 00:30:09 +0000 UTC" firstStartedPulling="2025-09-30 00:30:10.578854951 +0000 UTC m=+1261.615104359" lastFinishedPulling="2025-09-30 00:30:13.339939692 +0000 UTC m=+1264.376189100" observedRunningTime="2025-09-30 00:30:14.679518742 +0000 UTC m=+1265.715768150" watchObservedRunningTime="2025-09-30 00:30:14.690255474 +0000 UTC m=+1265.726504892" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.719620 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:14 crc kubenswrapper[4809]: E0930 00:30:14.720017 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="ceilometer-notification-agent" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.720033 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="ceilometer-notification-agent" Sep 30 00:30:14 crc kubenswrapper[4809]: E0930 00:30:14.720056 4809 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="proxy-httpd" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.720061 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="proxy-httpd" Sep 30 00:30:14 crc kubenswrapper[4809]: E0930 00:30:14.720072 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="sg-core" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.720079 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="sg-core" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.720259 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="sg-core" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.720296 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="ceilometer-notification-agent" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.720308 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" containerName="proxy-httpd" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.721941 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.725864 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.729076 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.734527 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6c8df69b5b-8qc6t" podStartSLOduration=2.734507539 podStartE2EDuration="2.734507539s" podCreationTimestamp="2025-09-30 00:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:14.700101673 +0000 UTC m=+1265.736351081" watchObservedRunningTime="2025-09-30 00:30:14.734507539 +0000 UTC m=+1265.770756947" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.735308 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.852960 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-config-data\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.853233 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-run-httpd\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.853257 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-log-httpd\") pod \"ceilometer-0\" (UID: 
\"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.853305 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-scripts\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.853352 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72ljw\" (UniqueName: \"kubernetes.io/projected/a5588ead-01ab-4225-bdf0-17270bff107a-kube-api-access-72ljw\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.853483 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.853585 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.955760 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.955850 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.955930 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-config-data\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.955994 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-run-httpd\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.956025 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-log-httpd\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.956081 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-scripts\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.956111 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72ljw\" (UniqueName: \"kubernetes.io/projected/a5588ead-01ab-4225-bdf0-17270bff107a-kube-api-access-72ljw\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.956587 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-run-httpd\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.956923 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-log-httpd\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.960769 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-scripts\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.961047 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.961323 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.962340 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-config-data\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:14 crc kubenswrapper[4809]: I0930 00:30:14.976690 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72ljw\" (UniqueName: \"kubernetes.io/projected/a5588ead-01ab-4225-bdf0-17270bff107a-kube-api-access-72ljw\") pod \"ceilometer-0\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " pod="openstack/ceilometer-0" Sep 30 00:30:15 crc kubenswrapper[4809]: I0930 00:30:15.070579 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:15 crc kubenswrapper[4809]: I0930 00:30:15.580597 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:15 crc kubenswrapper[4809]: I0930 00:30:15.606778 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerStarted","Data":"8ac0d15adae056e478a7c5cf6da435151d4fe375f99d9397c0d8d144d4617438"} Sep 30 00:30:15 crc kubenswrapper[4809]: I0930 00:30:15.608403 4809 generic.go:334] "Generic (PLEG): container finished" podID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" containerID="8b626b4d4e767700ac33232df90ec4d60f87c18ee0c45a30f5c6c8ddb44d31a0" exitCode=0 Sep 30 00:30:15 crc kubenswrapper[4809]: I0930 00:30:15.608554 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qttpq" event={"ID":"23ff9291-ad89-46a6-9d4a-adf9e545adb2","Type":"ContainerDied","Data":"8b626b4d4e767700ac33232df90ec4d60f87c18ee0c45a30f5c6c8ddb44d31a0"} Sep 30 00:30:15 crc kubenswrapper[4809]: I0930 00:30:15.703111 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3efcbfdc-c939-452c-96e5-244bc7b7bff3" path="/var/lib/kubelet/pods/3efcbfdc-c939-452c-96e5-244bc7b7bff3/volumes" Sep 30 00:30:16 crc kubenswrapper[4809]: I0930 00:30:16.617904 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerStarted","Data":"79b576470e558a67838f1d9d0c37245031cb3921ada5cd367ecc5556835d59be"} Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.114385 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qttpq" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.205228 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-combined-ca-bundle\") pod \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.205355 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-config-data\") pod \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.205471 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzxz6\" (UniqueName: \"kubernetes.io/projected/23ff9291-ad89-46a6-9d4a-adf9e545adb2-kube-api-access-zzxz6\") pod \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\" (UID: \"23ff9291-ad89-46a6-9d4a-adf9e545adb2\") " Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.220419 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23ff9291-ad89-46a6-9d4a-adf9e545adb2-kube-api-access-zzxz6" (OuterVolumeSpecName: "kube-api-access-zzxz6") pod "23ff9291-ad89-46a6-9d4a-adf9e545adb2" (UID: "23ff9291-ad89-46a6-9d4a-adf9e545adb2"). InnerVolumeSpecName "kube-api-access-zzxz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.243751 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23ff9291-ad89-46a6-9d4a-adf9e545adb2" (UID: "23ff9291-ad89-46a6-9d4a-adf9e545adb2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.297779 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-config-data" (OuterVolumeSpecName: "config-data") pod "23ff9291-ad89-46a6-9d4a-adf9e545adb2" (UID: "23ff9291-ad89-46a6-9d4a-adf9e545adb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.308474 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.308504 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23ff9291-ad89-46a6-9d4a-adf9e545adb2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.308513 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzxz6\" (UniqueName: \"kubernetes.io/projected/23ff9291-ad89-46a6-9d4a-adf9e545adb2-kube-api-access-zzxz6\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.635564 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qttpq" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.635566 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qttpq" event={"ID":"23ff9291-ad89-46a6-9d4a-adf9e545adb2","Type":"ContainerDied","Data":"188956dc3f1d0cfa16a552ee6f0c83e959e66752799a9877fe6a3d166dd17672"} Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.635681 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="188956dc3f1d0cfa16a552ee6f0c83e959e66752799a9877fe6a3d166dd17672" Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.653615 4809 generic.go:334] "Generic (PLEG): container finished" podID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" containerID="15bcf4a1213bb2857aaa2d46c105a7cf69375731605bc128731ea2b59f3e50f3" exitCode=0 Sep 30 00:30:17 crc kubenswrapper[4809]: I0930 00:30:17.654913 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qwzxp" event={"ID":"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5","Type":"ContainerDied","Data":"15bcf4a1213bb2857aaa2d46c105a7cf69375731605bc128731ea2b59f3e50f3"} Sep 30 00:30:18 crc kubenswrapper[4809]: I0930 00:30:18.666848 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerStarted","Data":"3ad6813185ae2f11af12622feed0a1aef0044298c4c2250e9a57a37084bf82d3"} Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.123050 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.249464 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw25b\" (UniqueName: \"kubernetes.io/projected/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-kube-api-access-sw25b\") pod \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.249514 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-config-data\") pod \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.250046 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-scripts\") pod \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.250072 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-combined-ca-bundle\") pod \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.250110 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-etc-machine-id\") pod \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.250168 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-db-sync-config-data\") pod \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\" (UID: \"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5\") " Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.250565 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" (UID: "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.257100 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-kube-api-access-sw25b" (OuterVolumeSpecName: "kube-api-access-sw25b") pod "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" (UID: "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5"). InnerVolumeSpecName "kube-api-access-sw25b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.257336 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" (UID: "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.257383 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-scripts" (OuterVolumeSpecName: "scripts") pod "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" (UID: "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.311039 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" (UID: "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.323777 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-config-data" (OuterVolumeSpecName: "config-data") pod "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" (UID: "f26e19e3-0ece-4f4d-aa5a-016fbfd929a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.352063 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.352105 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.352122 4809 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.352133 4809 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.352143 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw25b\" (UniqueName: \"kubernetes.io/projected/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-kube-api-access-sw25b\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.352151 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.679840 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerStarted","Data":"b0f6b297d991b755574922aa95c689560b0a91ef36ce142e30db7ca988566e15"} Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.682750 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qwzxp" event={"ID":"f26e19e3-0ece-4f4d-aa5a-016fbfd929a5","Type":"ContainerDied","Data":"75b319ba399b2ab4ccbce39163f239d221e7474d21ec29cc7e2a12223d2ca1c2"} 
Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.682783 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75b319ba399b2ab4ccbce39163f239d221e7474d21ec29cc7e2a12223d2ca1c2" Sep 30 00:30:19 crc kubenswrapper[4809]: I0930 00:30:19.682865 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qwzxp" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:19.994258 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:20 crc kubenswrapper[4809]: E0930 00:30:19.996301 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" containerName="heat-db-sync" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:19.996319 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" containerName="heat-db-sync" Sep 30 00:30:20 crc kubenswrapper[4809]: E0930 00:30:19.996333 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" containerName="cinder-db-sync" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:19.996362 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" containerName="cinder-db-sync" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:19.996698 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" containerName="heat-db-sync" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:19.996716 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" containerName="cinder-db-sync" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:19.998079 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.003220 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.003530 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-p2vzz" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.003663 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.003762 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.053219 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.064767 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-scripts\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.064829 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.064853 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f997756-ee76-40da-bdd7-baa8055994fc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.064888 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.064907 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz5qs\" (UniqueName: \"kubernetes.io/projected/7f997756-ee76-40da-bdd7-baa8055994fc-kube-api-access-bz5qs\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.065044 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.113057 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-slgbb"] Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.113292 4809 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="dnsmasq-dns" containerID="cri-o://82f7460e719e9f9f2c48cf62fb0daffac2fbbcf44197035708167cbef7ad0678" gracePeriod=10 Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.119794 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.124425 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-6xtp8"] Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.131879 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.140942 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-6xtp8"] Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167673 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167717 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz5qs\" (UniqueName: \"kubernetes.io/projected/7f997756-ee76-40da-bdd7-baa8055994fc-kube-api-access-bz5qs\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167743 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167781 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-svc\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167821 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167843 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd56p\" (UniqueName: \"kubernetes.io/projected/c6150622-6dfa-40f4-8d82-13e29dbcae08-kube-api-access-jd56p\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167903 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-config\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167932 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.167973 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.168019 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-scripts\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.168049 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.168070 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f997756-ee76-40da-bdd7-baa8055994fc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.168146 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f997756-ee76-40da-bdd7-baa8055994fc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.178202 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.204905 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-scripts\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.205958 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc 
kubenswrapper[4809]: I0930 00:30:20.213345 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.214359 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz5qs\" (UniqueName: \"kubernetes.io/projected/7f997756-ee76-40da-bdd7-baa8055994fc-kube-api-access-bz5qs\") pod \"cinder-scheduler-0\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.272568 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.272629 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-svc\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.272687 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.272706 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd56p\" (UniqueName: \"kubernetes.io/projected/c6150622-6dfa-40f4-8d82-13e29dbcae08-kube-api-access-jd56p\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.272761 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-config\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.272801 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.274020 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.274628 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-config\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.274708 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.274979 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-svc\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.275588 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.284972 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.183:5353: connect: connection refused" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.329784 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd56p\" (UniqueName: \"kubernetes.io/projected/c6150622-6dfa-40f4-8d82-13e29dbcae08-kube-api-access-jd56p\") pod \"dnsmasq-dns-5784cf869f-6xtp8\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.335713 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.337346 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.343974 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381551 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b17561b2-81f8-4de9-817f-6e88f05730a9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381623 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b17561b2-81f8-4de9-817f-6e88f05730a9-logs\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381705 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data-custom\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381744 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqp6k\" (UniqueName: \"kubernetes.io/projected/b17561b2-81f8-4de9-817f-6e88f05730a9-kube-api-access-vqp6k\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381786 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381845 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.381896 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-scripts\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.389708 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.459595 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.470253 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483239 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483315 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-scripts\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483387 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b17561b2-81f8-4de9-817f-6e88f05730a9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483435 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b17561b2-81f8-4de9-817f-6e88f05730a9-logs\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483520 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data-custom\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483553 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqp6k\" (UniqueName: \"kubernetes.io/projected/b17561b2-81f8-4de9-817f-6e88f05730a9-kube-api-access-vqp6k\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.483590 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.485166 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b17561b2-81f8-4de9-817f-6e88f05730a9-logs\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.497224 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b17561b2-81f8-4de9-817f-6e88f05730a9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.504467 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-scripts\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 
00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.526607 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data-custom\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.529239 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.534601 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.572243 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqp6k\" (UniqueName: \"kubernetes.io/projected/b17561b2-81f8-4de9-817f-6e88f05730a9-kube-api-access-vqp6k\") pod \"cinder-api-0\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.791823 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.829804 4809 generic.go:334] "Generic (PLEG): container finished" podID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerID="82f7460e719e9f9f2c48cf62fb0daffac2fbbcf44197035708167cbef7ad0678" exitCode=0 Sep 30 00:30:20 crc kubenswrapper[4809]: I0930 00:30:20.829864 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" event={"ID":"92f70fbe-b165-4384-a087-4f6afd932cf1","Type":"ContainerDied","Data":"82f7460e719e9f9f2c48cf62fb0daffac2fbbcf44197035708167cbef7ad0678"} Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.086293 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.116951 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.140717 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-nb\") pod \"92f70fbe-b165-4384-a087-4f6afd932cf1\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.140925 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-swift-storage-0\") pod \"92f70fbe-b165-4384-a087-4f6afd932cf1\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.140970 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-svc\") pod \"92f70fbe-b165-4384-a087-4f6afd932cf1\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.141052 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-config\") pod \"92f70fbe-b165-4384-a087-4f6afd932cf1\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.141078 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-sb\") pod \"92f70fbe-b165-4384-a087-4f6afd932cf1\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.141115 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c474g\" (UniqueName: \"kubernetes.io/projected/92f70fbe-b165-4384-a087-4f6afd932cf1-kube-api-access-c474g\") pod \"92f70fbe-b165-4384-a087-4f6afd932cf1\" (UID: \"92f70fbe-b165-4384-a087-4f6afd932cf1\") " Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.151947 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92f70fbe-b165-4384-a087-4f6afd932cf1-kube-api-access-c474g" (OuterVolumeSpecName: "kube-api-access-c474g") pod "92f70fbe-b165-4384-a087-4f6afd932cf1" (UID: "92f70fbe-b165-4384-a087-4f6afd932cf1"). InnerVolumeSpecName "kube-api-access-c474g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.244849 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c474g\" (UniqueName: \"kubernetes.io/projected/92f70fbe-b165-4384-a087-4f6afd932cf1-kube-api-access-c474g\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.249514 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "92f70fbe-b165-4384-a087-4f6afd932cf1" (UID: "92f70fbe-b165-4384-a087-4f6afd932cf1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.273319 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "92f70fbe-b165-4384-a087-4f6afd932cf1" (UID: "92f70fbe-b165-4384-a087-4f6afd932cf1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.281206 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-config" (OuterVolumeSpecName: "config") pod "92f70fbe-b165-4384-a087-4f6afd932cf1" (UID: "92f70fbe-b165-4384-a087-4f6afd932cf1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.351165 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.351552 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.351569 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.383310 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "92f70fbe-b165-4384-a087-4f6afd932cf1" (UID: "92f70fbe-b165-4384-a087-4f6afd932cf1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.408253 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "92f70fbe-b165-4384-a087-4f6afd932cf1" (UID: "92f70fbe-b165-4384-a087-4f6afd932cf1"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.454026 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.454058 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92f70fbe-b165-4384-a087-4f6afd932cf1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.662480 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-6xtp8"] Sep 30 00:30:21 crc kubenswrapper[4809]: W0930 00:30:21.668630 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f997756_ee76_40da_bdd7_baa8055994fc.slice/crio-c4e1fb23fe27f63eaaf1234af7fada24e80aff966033e1bdf140d26dd1c610b9 WatchSource:0}: Error finding container c4e1fb23fe27f63eaaf1234af7fada24e80aff966033e1bdf140d26dd1c610b9: Status 404 returned error can't find the container with id c4e1fb23fe27f63eaaf1234af7fada24e80aff966033e1bdf140d26dd1c610b9 Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.679384 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.796584 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.857541 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b17561b2-81f8-4de9-817f-6e88f05730a9","Type":"ContainerStarted","Data":"e0eb1d7e32a3182e20ba65922ba45ee9d9f904e8fb5d98002e708095533f2558"} Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.869163 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerStarted","Data":"9a8edb52e80b8ccba70418b15f50bb39fdddb2169416de42242c90501a80ca66"} Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.869332 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.871330 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" event={"ID":"c6150622-6dfa-40f4-8d82-13e29dbcae08","Type":"ContainerStarted","Data":"22686bcc6b3d5e830d4227822c87becb0b07ffad1fc4d5e2d4a22ce8d023a9f7"} Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.889418 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f997756-ee76-40da-bdd7-baa8055994fc","Type":"ContainerStarted","Data":"c4e1fb23fe27f63eaaf1234af7fada24e80aff966033e1bdf140d26dd1c610b9"} Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.909410 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.519669786 podStartE2EDuration="7.909389926s" podCreationTimestamp="2025-09-30 00:30:14 +0000 UTC" firstStartedPulling="2025-09-30 00:30:15.585678863 +0000 UTC m=+1266.621928271" lastFinishedPulling="2025-09-30 00:30:19.975399003 +0000 UTC m=+1271.011648411" observedRunningTime="2025-09-30 00:30:21.907017061 +0000 UTC m=+1272.943266469" watchObservedRunningTime="2025-09-30 
00:30:21.909389926 +0000 UTC m=+1272.945639334" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.910984 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" event={"ID":"92f70fbe-b165-4384-a087-4f6afd932cf1","Type":"ContainerDied","Data":"719714a164cd4de5f2d1128c16fc03923d6c1de1f2cd73f81498ae38911e18af"} Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.911034 4809 scope.go:117] "RemoveContainer" containerID="82f7460e719e9f9f2c48cf62fb0daffac2fbbcf44197035708167cbef7ad0678" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.911161 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-slgbb" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.954740 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-slgbb"] Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.966125 4809 scope.go:117] "RemoveContainer" containerID="9795c7ddf97ff33a3362f43ff48975ee4d71f34c0efbd0827cd79aea5ac4f24d" Sep 30 00:30:21 crc kubenswrapper[4809]: I0930 00:30:21.974584 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-slgbb"] Sep 30 00:30:22 crc kubenswrapper[4809]: I0930 00:30:22.381383 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:22 crc kubenswrapper[4809]: I0930 00:30:22.765091 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:22 crc kubenswrapper[4809]: I0930 00:30:22.942543 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b17561b2-81f8-4de9-817f-6e88f05730a9","Type":"ContainerStarted","Data":"ca59482742bda5cf04d6795e8bce21b07a0e8f69b686a1363dbc7e7e6f421349"} Sep 30 00:30:22 crc kubenswrapper[4809]: I0930 00:30:22.946842 4809 generic.go:334] "Generic (PLEG): container finished" podID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerID="0e141b2eb33bb6a66514c833e42f8eab07fd7ec72bfeb1c8fd8ac82b13c0ceaf" exitCode=0 Sep 30 00:30:22 crc kubenswrapper[4809]: I0930 00:30:22.948433 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" event={"ID":"c6150622-6dfa-40f4-8d82-13e29dbcae08","Type":"ContainerDied","Data":"0e141b2eb33bb6a66514c833e42f8eab07fd7ec72bfeb1c8fd8ac82b13c0ceaf"} Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.435232 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-579f9b76b9-drlks" Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.626117 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5c58bdbf84-bjfsw"] Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.626583 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5c58bdbf84-bjfsw" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-api" containerID="cri-o://1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac" gracePeriod=30 Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.627097 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5c58bdbf84-bjfsw" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-httpd" containerID="cri-o://60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4" gracePeriod=30 Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.741276 4809 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" path="/var/lib/kubelet/pods/92f70fbe-b165-4384-a087-4f6afd932cf1/volumes" Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.751269 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.989911 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b17561b2-81f8-4de9-817f-6e88f05730a9","Type":"ContainerStarted","Data":"2ebdeda5f84dc4bfd7a48c8153845b841aa2fd36f4affa8382beec9ff92bef9d"} Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.990440 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api-log" containerID="cri-o://ca59482742bda5cf04d6795e8bce21b07a0e8f69b686a1363dbc7e7e6f421349" gracePeriod=30 Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.990679 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 00:30:23 crc kubenswrapper[4809]: I0930 00:30:23.990925 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api" containerID="cri-o://2ebdeda5f84dc4bfd7a48c8153845b841aa2fd36f4affa8382beec9ff92bef9d" gracePeriod=30 Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.006724 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" event={"ID":"c6150622-6dfa-40f4-8d82-13e29dbcae08","Type":"ContainerStarted","Data":"8aa35f0f32d41c49084edbbb21b99149fb7269921b35410be0ddb251bedcc687"} Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.007526 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.016495 4809 generic.go:334] "Generic (PLEG): container finished" podID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerID="60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4" exitCode=0 Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.016570 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c58bdbf84-bjfsw" event={"ID":"54445277-2cb9-4dad-b1a4-0b0569ef0088","Type":"ContainerDied","Data":"60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4"} Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.040519 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.040499176 podStartE2EDuration="4.040499176s" podCreationTimestamp="2025-09-30 00:30:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:24.012564175 +0000 UTC m=+1275.048813593" watchObservedRunningTime="2025-09-30 00:30:24.040499176 +0000 UTC m=+1275.076748594" Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.043831 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f997756-ee76-40da-bdd7-baa8055994fc","Type":"ContainerStarted","Data":"3847ebfeef07beb4b695ee51b1be8276613155fff5bf51064d4548b3de7468fc"} Sep 30 00:30:24 crc kubenswrapper[4809]: I0930 00:30:24.047247 4809 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" podStartSLOduration=4.047233578 podStartE2EDuration="4.047233578s" podCreationTimestamp="2025-09-30 00:30:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:24.038581023 +0000 UTC m=+1275.074830451" watchObservedRunningTime="2025-09-30 00:30:24.047233578 +0000 UTC m=+1275.083482986" Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.065315 4809 generic.go:334] "Generic (PLEG): container finished" podID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerID="ca59482742bda5cf04d6795e8bce21b07a0e8f69b686a1363dbc7e7e6f421349" exitCode=143 Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.065407 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b17561b2-81f8-4de9-817f-6e88f05730a9","Type":"ContainerDied","Data":"ca59482742bda5cf04d6795e8bce21b07a0e8f69b686a1363dbc7e7e6f421349"} Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.069694 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f997756-ee76-40da-bdd7-baa8055994fc","Type":"ContainerStarted","Data":"1625966603c07e663381a969cff69a732d3e9eaa42529602120a6062c874094b"} Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.094508 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.058119395 podStartE2EDuration="6.09448201s" podCreationTimestamp="2025-09-30 00:30:19 +0000 UTC" firstStartedPulling="2025-09-30 00:30:21.674960503 +0000 UTC m=+1272.711209911" lastFinishedPulling="2025-09-30 00:30:22.711323118 +0000 UTC m=+1273.747572526" observedRunningTime="2025-09-30 00:30:25.086591375 +0000 UTC m=+1276.122840783" watchObservedRunningTime="2025-09-30 00:30:25.09448201 +0000 UTC m=+1276.130731418" Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.325056 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.325115 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.443728 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.470979 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.493278 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6c8df69b5b-8qc6t" Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.588478 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b8bf7879d-9q9nt"] Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.588707 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b8bf7879d-9q9nt" 
podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api-log" containerID="cri-o://d115a77a59d1cf73b28846d906ab5b615a72a6d081d62b968741ece41b5e17f5" gracePeriod=30 Sep 30 00:30:25 crc kubenswrapper[4809]: I0930 00:30:25.589162 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b8bf7879d-9q9nt" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api" containerID="cri-o://8142e13ebd8d44e9d10909b902df5281cea7dc9bfd55c48d4c6b1fac3c577db7" gracePeriod=30 Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.079678 4809 generic.go:334] "Generic (PLEG): container finished" podID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerID="d115a77a59d1cf73b28846d906ab5b615a72a6d081d62b968741ece41b5e17f5" exitCode=143 Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.079747 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b8bf7879d-9q9nt" event={"ID":"f3c852e4-ed87-4a74-9660-fcd02babe2e5","Type":"ContainerDied","Data":"d115a77a59d1cf73b28846d906ab5b615a72a6d081d62b968741ece41b5e17f5"} Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.776409 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.847183 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-httpd-config\") pod \"54445277-2cb9-4dad-b1a4-0b0569ef0088\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.847331 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-ovndb-tls-certs\") pod \"54445277-2cb9-4dad-b1a4-0b0569ef0088\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.847362 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-combined-ca-bundle\") pod \"54445277-2cb9-4dad-b1a4-0b0569ef0088\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.847544 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-config\") pod \"54445277-2cb9-4dad-b1a4-0b0569ef0088\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.847583 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9qjm\" (UniqueName: \"kubernetes.io/projected/54445277-2cb9-4dad-b1a4-0b0569ef0088-kube-api-access-d9qjm\") pod \"54445277-2cb9-4dad-b1a4-0b0569ef0088\" (UID: \"54445277-2cb9-4dad-b1a4-0b0569ef0088\") " Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.854840 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "54445277-2cb9-4dad-b1a4-0b0569ef0088" (UID: "54445277-2cb9-4dad-b1a4-0b0569ef0088"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.859813 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54445277-2cb9-4dad-b1a4-0b0569ef0088-kube-api-access-d9qjm" (OuterVolumeSpecName: "kube-api-access-d9qjm") pod "54445277-2cb9-4dad-b1a4-0b0569ef0088" (UID: "54445277-2cb9-4dad-b1a4-0b0569ef0088"). InnerVolumeSpecName "kube-api-access-d9qjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.906192 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54445277-2cb9-4dad-b1a4-0b0569ef0088" (UID: "54445277-2cb9-4dad-b1a4-0b0569ef0088"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.936374 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "54445277-2cb9-4dad-b1a4-0b0569ef0088" (UID: "54445277-2cb9-4dad-b1a4-0b0569ef0088"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.937766 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-config" (OuterVolumeSpecName: "config") pod "54445277-2cb9-4dad-b1a4-0b0569ef0088" (UID: "54445277-2cb9-4dad-b1a4-0b0569ef0088"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.949717 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.949755 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9qjm\" (UniqueName: \"kubernetes.io/projected/54445277-2cb9-4dad-b1a4-0b0569ef0088-kube-api-access-d9qjm\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.949770 4809 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.949783 4809 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:26 crc kubenswrapper[4809]: I0930 00:30:26.949794 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54445277-2cb9-4dad-b1a4-0b0569ef0088-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.093087 4809 generic.go:334] "Generic (PLEG): container finished" podID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerID="1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac" exitCode=0 Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.093921 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c58bdbf84-bjfsw" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.102016 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c58bdbf84-bjfsw" event={"ID":"54445277-2cb9-4dad-b1a4-0b0569ef0088","Type":"ContainerDied","Data":"1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac"} Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.102044 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c58bdbf84-bjfsw" event={"ID":"54445277-2cb9-4dad-b1a4-0b0569ef0088","Type":"ContainerDied","Data":"75ec8470b0e3238f61f3ec37f1ff751c8568546f034ed1994e38e512208b09f3"} Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.102062 4809 scope.go:117] "RemoveContainer" containerID="60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.127607 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5c58bdbf84-bjfsw"] Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.136804 4809 scope.go:117] "RemoveContainer" containerID="1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.138129 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5c58bdbf84-bjfsw"] Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.165386 4809 scope.go:117] "RemoveContainer" containerID="60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4" Sep 30 00:30:27 crc kubenswrapper[4809]: E0930 00:30:27.167789 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4\": container with ID starting with 60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4 not found: ID does not exist" containerID="60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.167842 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4"} err="failed to get container status \"60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4\": rpc error: code = NotFound desc = could not find container \"60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4\": container with ID starting with 60aaed830bfd1c3ba39d4d5a7c147d8461ed95d12290606e903a1e0ddc294de4 not found: ID does not exist" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.167881 4809 scope.go:117] "RemoveContainer" containerID="1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac" Sep 30 00:30:27 crc kubenswrapper[4809]: E0930 00:30:27.168381 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac\": container with ID starting with 1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac not found: ID does not exist" containerID="1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.168426 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac"} err="failed to get container status 
\"1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac\": rpc error: code = NotFound desc = could not find container \"1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac\": container with ID starting with 1b08dec5bafb614a825d41caf811bcd9d094e8aa1c1f40387b57fb2442e45bac not found: ID does not exist" Sep 30 00:30:27 crc kubenswrapper[4809]: I0930 00:30:27.709380 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" path="/var/lib/kubelet/pods/54445277-2cb9-4dad-b1a4-0b0569ef0088/volumes" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.128963 4809 generic.go:334] "Generic (PLEG): container finished" podID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerID="8142e13ebd8d44e9d10909b902df5281cea7dc9bfd55c48d4c6b1fac3c577db7" exitCode=0 Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.129038 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b8bf7879d-9q9nt" event={"ID":"f3c852e4-ed87-4a74-9660-fcd02babe2e5","Type":"ContainerDied","Data":"8142e13ebd8d44e9d10909b902df5281cea7dc9bfd55c48d4c6b1fac3c577db7"} Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.320201 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.414221 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data\") pod \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.414589 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkwcj\" (UniqueName: \"kubernetes.io/projected/f3c852e4-ed87-4a74-9660-fcd02babe2e5-kube-api-access-bkwcj\") pod \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.414628 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data-custom\") pod \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.414749 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3c852e4-ed87-4a74-9660-fcd02babe2e5-logs\") pod \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.414872 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-combined-ca-bundle\") pod \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\" (UID: \"f3c852e4-ed87-4a74-9660-fcd02babe2e5\") " Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.415452 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3c852e4-ed87-4a74-9660-fcd02babe2e5-logs" (OuterVolumeSpecName: "logs") pod "f3c852e4-ed87-4a74-9660-fcd02babe2e5" (UID: "f3c852e4-ed87-4a74-9660-fcd02babe2e5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.420348 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f3c852e4-ed87-4a74-9660-fcd02babe2e5" (UID: "f3c852e4-ed87-4a74-9660-fcd02babe2e5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.421397 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3c852e4-ed87-4a74-9660-fcd02babe2e5-kube-api-access-bkwcj" (OuterVolumeSpecName: "kube-api-access-bkwcj") pod "f3c852e4-ed87-4a74-9660-fcd02babe2e5" (UID: "f3c852e4-ed87-4a74-9660-fcd02babe2e5"). InnerVolumeSpecName "kube-api-access-bkwcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.451917 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3c852e4-ed87-4a74-9660-fcd02babe2e5" (UID: "f3c852e4-ed87-4a74-9660-fcd02babe2e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.484021 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data" (OuterVolumeSpecName: "config-data") pod "f3c852e4-ed87-4a74-9660-fcd02babe2e5" (UID: "f3c852e4-ed87-4a74-9660-fcd02babe2e5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.521297 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkwcj\" (UniqueName: \"kubernetes.io/projected/f3c852e4-ed87-4a74-9660-fcd02babe2e5-kube-api-access-bkwcj\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.521323 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.521334 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3c852e4-ed87-4a74-9660-fcd02babe2e5-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.521344 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.521353 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c852e4-ed87-4a74-9660-fcd02babe2e5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.774577 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:30:29 crc kubenswrapper[4809]: I0930 00:30:29.775713 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7f6c7c986d-hgzt9" Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.140623 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b8bf7879d-9q9nt" event={"ID":"f3c852e4-ed87-4a74-9660-fcd02babe2e5","Type":"ContainerDied","Data":"d98e84ad669890b487383e1572dd35bacb122f99d76fee689ab315ebdcb579f0"} Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.140696 4809 scope.go:117] "RemoveContainer" containerID="8142e13ebd8d44e9d10909b902df5281cea7dc9bfd55c48d4c6b1fac3c577db7" Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.140662 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6b8bf7879d-9q9nt" Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.168564 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b8bf7879d-9q9nt"] Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.172606 4809 scope.go:117] "RemoveContainer" containerID="d115a77a59d1cf73b28846d906ab5b615a72a6d081d62b968741ece41b5e17f5" Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.179788 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6b8bf7879d-9q9nt"] Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.469510 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.570886 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-bdvnx"] Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.571098 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="dnsmasq-dns" containerID="cri-o://db7fc80ecb951daceaff02d4e2289f2c318240092d3e9daa10d8fabdab4c79f2" gracePeriod=10 Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.780764 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 00:30:30 crc kubenswrapper[4809]: I0930 00:30:30.862007 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.151904 4809 generic.go:334] "Generic (PLEG): container finished" podID="ab502782-7769-460f-8a2f-86d4886ec40a" containerID="db7fc80ecb951daceaff02d4e2289f2c318240092d3e9daa10d8fabdab4c79f2" exitCode=0 Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.151995 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" event={"ID":"ab502782-7769-460f-8a2f-86d4886ec40a","Type":"ContainerDied","Data":"db7fc80ecb951daceaff02d4e2289f2c318240092d3e9daa10d8fabdab4c79f2"} Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.152049 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" event={"ID":"ab502782-7769-460f-8a2f-86d4886ec40a","Type":"ContainerDied","Data":"f7163504a2fc86ed9e4f89724aa04ea5a5279d06b13e90b6f4ea0be6076b945d"} Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.152064 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7163504a2fc86ed9e4f89724aa04ea5a5279d06b13e90b6f4ea0be6076b945d" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.153414 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="cinder-scheduler" containerID="cri-o://3847ebfeef07beb4b695ee51b1be8276613155fff5bf51064d4548b3de7468fc" gracePeriod=30 Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.153927 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="probe" containerID="cri-o://1625966603c07e663381a969cff69a732d3e9eaa42529602120a6062c874094b" gracePeriod=30 Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.204388 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.372144 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-nb\") pod \"ab502782-7769-460f-8a2f-86d4886ec40a\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.372205 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-config\") pod \"ab502782-7769-460f-8a2f-86d4886ec40a\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.372263 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-swift-storage-0\") pod \"ab502782-7769-460f-8a2f-86d4886ec40a\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.372291 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-sb\") pod \"ab502782-7769-460f-8a2f-86d4886ec40a\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.372383 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-svc\") pod \"ab502782-7769-460f-8a2f-86d4886ec40a\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.372404 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zqtf\" (UniqueName: \"kubernetes.io/projected/ab502782-7769-460f-8a2f-86d4886ec40a-kube-api-access-5zqtf\") pod \"ab502782-7769-460f-8a2f-86d4886ec40a\" (UID: \"ab502782-7769-460f-8a2f-86d4886ec40a\") " Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.379280 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab502782-7769-460f-8a2f-86d4886ec40a-kube-api-access-5zqtf" (OuterVolumeSpecName: "kube-api-access-5zqtf") pod "ab502782-7769-460f-8a2f-86d4886ec40a" (UID: "ab502782-7769-460f-8a2f-86d4886ec40a"). InnerVolumeSpecName "kube-api-access-5zqtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.429930 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-config" (OuterVolumeSpecName: "config") pod "ab502782-7769-460f-8a2f-86d4886ec40a" (UID: "ab502782-7769-460f-8a2f-86d4886ec40a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.439431 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab502782-7769-460f-8a2f-86d4886ec40a" (UID: "ab502782-7769-460f-8a2f-86d4886ec40a"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.439900 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab502782-7769-460f-8a2f-86d4886ec40a" (UID: "ab502782-7769-460f-8a2f-86d4886ec40a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.443124 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab502782-7769-460f-8a2f-86d4886ec40a" (UID: "ab502782-7769-460f-8a2f-86d4886ec40a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.445266 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab502782-7769-460f-8a2f-86d4886ec40a" (UID: "ab502782-7769-460f-8a2f-86d4886ec40a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.475537 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.475585 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.475598 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.475611 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zqtf\" (UniqueName: \"kubernetes.io/projected/ab502782-7769-460f-8a2f-86d4886ec40a-kube-api-access-5zqtf\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.475624 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.475635 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab502782-7769-460f-8a2f-86d4886ec40a-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:31 crc kubenswrapper[4809]: I0930 00:30:31.713894 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" path="/var/lib/kubelet/pods/f3c852e4-ed87-4a74-9660-fcd02babe2e5/volumes" Sep 30 00:30:32 crc kubenswrapper[4809]: I0930 00:30:32.169168 4809 generic.go:334] "Generic (PLEG): container finished" podID="7f997756-ee76-40da-bdd7-baa8055994fc" containerID="1625966603c07e663381a969cff69a732d3e9eaa42529602120a6062c874094b" exitCode=0 Sep 30 00:30:32 crc kubenswrapper[4809]: I0930 00:30:32.169244 4809 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f997756-ee76-40da-bdd7-baa8055994fc","Type":"ContainerDied","Data":"1625966603c07e663381a969cff69a732d3e9eaa42529602120a6062c874094b"} Sep 30 00:30:32 crc kubenswrapper[4809]: I0930 00:30:32.169293 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" Sep 30 00:30:32 crc kubenswrapper[4809]: I0930 00:30:32.199547 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-bdvnx"] Sep 30 00:30:32 crc kubenswrapper[4809]: I0930 00:30:32.211852 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-bdvnx"] Sep 30 00:30:32 crc kubenswrapper[4809]: I0930 00:30:32.743477 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 00:30:33 crc kubenswrapper[4809]: I0930 00:30:33.470970 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-697fb77f5c-2p8qd" Sep 30 00:30:33 crc kubenswrapper[4809]: I0930 00:30:33.703394 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" path="/var/lib/kubelet/pods/ab502782-7769-460f-8a2f-86d4886ec40a/volumes" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.196803 4809 generic.go:334] "Generic (PLEG): container finished" podID="7f997756-ee76-40da-bdd7-baa8055994fc" containerID="3847ebfeef07beb4b695ee51b1be8276613155fff5bf51064d4548b3de7468fc" exitCode=0 Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.196851 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f997756-ee76-40da-bdd7-baa8055994fc","Type":"ContainerDied","Data":"3847ebfeef07beb4b695ee51b1be8276613155fff5bf51064d4548b3de7468fc"} Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.374621 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.561428 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-combined-ca-bundle\") pod \"7f997756-ee76-40da-bdd7-baa8055994fc\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.561735 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz5qs\" (UniqueName: \"kubernetes.io/projected/7f997756-ee76-40da-bdd7-baa8055994fc-kube-api-access-bz5qs\") pod \"7f997756-ee76-40da-bdd7-baa8055994fc\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.562054 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data\") pod \"7f997756-ee76-40da-bdd7-baa8055994fc\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.562121 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data-custom\") pod \"7f997756-ee76-40da-bdd7-baa8055994fc\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.562240 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f997756-ee76-40da-bdd7-baa8055994fc-etc-machine-id\") pod \"7f997756-ee76-40da-bdd7-baa8055994fc\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.562308 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f997756-ee76-40da-bdd7-baa8055994fc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7f997756-ee76-40da-bdd7-baa8055994fc" (UID: "7f997756-ee76-40da-bdd7-baa8055994fc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.562407 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-scripts\") pod \"7f997756-ee76-40da-bdd7-baa8055994fc\" (UID: \"7f997756-ee76-40da-bdd7-baa8055994fc\") " Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.562877 4809 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f997756-ee76-40da-bdd7-baa8055994fc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.567156 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7f997756-ee76-40da-bdd7-baa8055994fc" (UID: "7f997756-ee76-40da-bdd7-baa8055994fc"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.571329 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f997756-ee76-40da-bdd7-baa8055994fc-kube-api-access-bz5qs" (OuterVolumeSpecName: "kube-api-access-bz5qs") pod "7f997756-ee76-40da-bdd7-baa8055994fc" (UID: "7f997756-ee76-40da-bdd7-baa8055994fc"). InnerVolumeSpecName "kube-api-access-bz5qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.571935 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-scripts" (OuterVolumeSpecName: "scripts") pod "7f997756-ee76-40da-bdd7-baa8055994fc" (UID: "7f997756-ee76-40da-bdd7-baa8055994fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.644487 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f997756-ee76-40da-bdd7-baa8055994fc" (UID: "7f997756-ee76-40da-bdd7-baa8055994fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.665314 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.665360 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.665375 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz5qs\" (UniqueName: \"kubernetes.io/projected/7f997756-ee76-40da-bdd7-baa8055994fc-kube-api-access-bz5qs\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.665389 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.671393 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data" (OuterVolumeSpecName: "config-data") pod "7f997756-ee76-40da-bdd7-baa8055994fc" (UID: "7f997756-ee76-40da-bdd7-baa8055994fc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:35 crc kubenswrapper[4809]: I0930 00:30:35.767267 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f997756-ee76-40da-bdd7-baa8055994fc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.003604 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-84b966f6c9-bdvnx" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.175:5353: i/o timeout" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.213438 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f997756-ee76-40da-bdd7-baa8055994fc","Type":"ContainerDied","Data":"c4e1fb23fe27f63eaaf1234af7fada24e80aff966033e1bdf140d26dd1c610b9"} Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.213501 4809 scope.go:117] "RemoveContainer" containerID="1625966603c07e663381a969cff69a732d3e9eaa42529602120a6062c874094b" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.213686 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.241080 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.251243 4809 scope.go:117] "RemoveContainer" containerID="3847ebfeef07beb4b695ee51b1be8276613155fff5bf51064d4548b3de7468fc" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.252363 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.276968 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277419 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="cinder-scheduler" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277441 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="cinder-scheduler" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277460 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="init" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277468 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="init" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277487 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="dnsmasq-dns" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277493 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="dnsmasq-dns" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277510 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api-log" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277516 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api-log" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277526 4809 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-api" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277532 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-api" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277544 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277550 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277566 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="probe" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277573 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="probe" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277583 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="dnsmasq-dns" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277590 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="dnsmasq-dns" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277609 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="init" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277616 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="init" Sep 30 00:30:36 crc kubenswrapper[4809]: E0930 00:30:36.277631 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-httpd" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277654 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-httpd" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277825 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="probe" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277837 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f70fbe-b165-4384-a087-4f6afd932cf1" containerName="dnsmasq-dns" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277849 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-httpd" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277861 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="54445277-2cb9-4dad-b1a4-0b0569ef0088" containerName="neutron-api" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277874 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" containerName="cinder-scheduler" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277890 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277900 4809 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="ab502782-7769-460f-8a2f-86d4886ec40a" containerName="dnsmasq-dns" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.277914 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c852e4-ed87-4a74-9660-fcd02babe2e5" containerName="barbican-api-log" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.278990 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.283077 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.301282 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.381014 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e387977a-8b10-46fb-bd34-0d8212c55fac-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.381065 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-scripts\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.381125 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-config-data\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.381172 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.381236 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db9m7\" (UniqueName: \"kubernetes.io/projected/e387977a-8b10-46fb-bd34-0d8212c55fac-kube-api-access-db9m7\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.381290 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.483289 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db9m7\" (UniqueName: \"kubernetes.io/projected/e387977a-8b10-46fb-bd34-0d8212c55fac-kube-api-access-db9m7\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 
00:30:36.483396 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.483447 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e387977a-8b10-46fb-bd34-0d8212c55fac-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.483466 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-scripts\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.483521 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-config-data\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.483552 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.483959 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e387977a-8b10-46fb-bd34-0d8212c55fac-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.488946 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.493110 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.497767 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-config-data\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.500832 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e387977a-8b10-46fb-bd34-0d8212c55fac-scripts\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " 
pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.504146 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db9m7\" (UniqueName: \"kubernetes.io/projected/e387977a-8b10-46fb-bd34-0d8212c55fac-kube-api-access-db9m7\") pod \"cinder-scheduler-0\" (UID: \"e387977a-8b10-46fb-bd34-0d8212c55fac\") " pod="openstack/cinder-scheduler-0" Sep 30 00:30:36 crc kubenswrapper[4809]: I0930 00:30:36.606131 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.100892 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:30:37 crc kubenswrapper[4809]: W0930 00:30:37.103155 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode387977a_8b10_46fb_bd34_0d8212c55fac.slice/crio-9ee401c8d9d6f80e0bf6ea75ce47a6a3ec6e387a258788be1c339b1d4144ea63 WatchSource:0}: Error finding container 9ee401c8d9d6f80e0bf6ea75ce47a6a3ec6e387a258788be1c339b1d4144ea63: Status 404 returned error can't find the container with id 9ee401c8d9d6f80e0bf6ea75ce47a6a3ec6e387a258788be1c339b1d4144ea63 Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.258416 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e387977a-8b10-46fb-bd34-0d8212c55fac","Type":"ContainerStarted","Data":"9ee401c8d9d6f80e0bf6ea75ce47a6a3ec6e387a258788be1c339b1d4144ea63"} Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.485808 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.487388 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.489466 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.489473 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.492577 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-xddmt" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.496465 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.631282 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk8f6\" (UniqueName: \"kubernetes.io/projected/96d0165f-2a62-4c67-b140-0073b5ef59a0-kube-api-access-kk8f6\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.631615 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96d0165f-2a62-4c67-b140-0073b5ef59a0-openstack-config-secret\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.631728 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96d0165f-2a62-4c67-b140-0073b5ef59a0-openstack-config\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.631777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d0165f-2a62-4c67-b140-0073b5ef59a0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.702587 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f997756-ee76-40da-bdd7-baa8055994fc" path="/var/lib/kubelet/pods/7f997756-ee76-40da-bdd7-baa8055994fc/volumes" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.733400 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk8f6\" (UniqueName: \"kubernetes.io/projected/96d0165f-2a62-4c67-b140-0073b5ef59a0-kube-api-access-kk8f6\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.733477 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96d0165f-2a62-4c67-b140-0073b5ef59a0-openstack-config-secret\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.733529 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/96d0165f-2a62-4c67-b140-0073b5ef59a0-openstack-config\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.733573 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d0165f-2a62-4c67-b140-0073b5ef59a0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.734388 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96d0165f-2a62-4c67-b140-0073b5ef59a0-openstack-config\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.737018 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96d0165f-2a62-4c67-b140-0073b5ef59a0-openstack-config-secret\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.737971 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d0165f-2a62-4c67-b140-0073b5ef59a0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.760781 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk8f6\" (UniqueName: \"kubernetes.io/projected/96d0165f-2a62-4c67-b140-0073b5ef59a0-kube-api-access-kk8f6\") pod \"openstackclient\" (UID: \"96d0165f-2a62-4c67-b140-0073b5ef59a0\") " pod="openstack/openstackclient" Sep 30 00:30:37 crc kubenswrapper[4809]: I0930 00:30:37.814412 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 00:30:38 crc kubenswrapper[4809]: I0930 00:30:38.249421 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 00:30:38 crc kubenswrapper[4809]: I0930 00:30:38.292833 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e387977a-8b10-46fb-bd34-0d8212c55fac","Type":"ContainerStarted","Data":"65f77e007abd7cfcad639d9e0425d18c54477a18831b7a68d82e9443eaa9cbd8"} Sep 30 00:30:39 crc kubenswrapper[4809]: I0930 00:30:39.306244 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e387977a-8b10-46fb-bd34-0d8212c55fac","Type":"ContainerStarted","Data":"e060e17c5a927e84fb61bcebb7e26429d289c2b8b4ad400fa2ab52cf9414cb79"} Sep 30 00:30:39 crc kubenswrapper[4809]: I0930 00:30:39.307764 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"96d0165f-2a62-4c67-b140-0073b5ef59a0","Type":"ContainerStarted","Data":"51bd53bb8c013e62099ea7f18990447a644c27ab07be7540110efa9b984f4405"} Sep 30 00:30:39 crc kubenswrapper[4809]: I0930 00:30:39.330978 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.330960359 podStartE2EDuration="3.330960359s" podCreationTimestamp="2025-09-30 00:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:39.328604124 +0000 UTC m=+1290.364853542" watchObservedRunningTime="2025-09-30 00:30:39.330960359 +0000 UTC m=+1290.367209777" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.704436 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6c8878cd54-q8bdj"] Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.713830 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.753100 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-j9vcq" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.753299 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.753437 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.783848 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6c8878cd54-q8bdj"] Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.798207 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.798249 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szqtx\" (UniqueName: \"kubernetes.io/projected/30ff7679-5c63-4bb2-a427-90410586d459-kube-api-access-szqtx\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.798384 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-combined-ca-bundle\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.799227 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data-custom\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.900993 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.901038 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szqtx\" (UniqueName: \"kubernetes.io/projected/30ff7679-5c63-4bb2-a427-90410586d459-kube-api-access-szqtx\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.901072 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-combined-ca-bundle\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " 
pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.901135 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data-custom\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.910190 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.910442 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-combined-ca-bundle\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.918476 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f6bc4c6c9-r57b4"] Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.922095 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.928251 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data-custom\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.942942 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szqtx\" (UniqueName: \"kubernetes.io/projected/30ff7679-5c63-4bb2-a427-90410586d459-kube-api-access-szqtx\") pod \"heat-engine-6c8878cd54-q8bdj\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:40 crc kubenswrapper[4809]: I0930 00:30:40.959756 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f6bc4c6c9-r57b4"] Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.002440 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-config\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.002532 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-nb\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.002551 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-svc\") pod 
\"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.002614 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-sb\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.002631 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-swift-storage-0\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.002724 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8wqh\" (UniqueName: \"kubernetes.io/projected/f585e9ed-4d32-49f6-84af-87ba0d4093d9-kube-api-access-g8wqh\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.019901 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6949b76cf4-kw8lk"] Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.022211 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.032030 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.041905 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6949b76cf4-kw8lk"] Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.068134 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-c5884cff9-92mmk"] Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.070869 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.089673 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c5884cff9-92mmk"] Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.089789 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109045 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-sb\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109099 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-swift-storage-0\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109140 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg5f2\" (UniqueName: \"kubernetes.io/projected/e7306bf5-ea9b-4121-a0b0-af1603503993-kube-api-access-lg5f2\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data-custom\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109214 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-combined-ca-bundle\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109244 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8wqh\" (UniqueName: \"kubernetes.io/projected/f585e9ed-4d32-49f6-84af-87ba0d4093d9-kube-api-access-g8wqh\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109286 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-config\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109363 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " 
pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109393 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-nb\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.109417 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-svc\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.110460 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-svc\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.112393 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-config\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.113091 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-swift-storage-0\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.113157 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-nb\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.118449 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-sb\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.149280 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.173474 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8wqh\" (UniqueName: \"kubernetes.io/projected/f585e9ed-4d32-49f6-84af-87ba0d4093d9-kube-api-access-g8wqh\") pod \"dnsmasq-dns-f6bc4c6c9-r57b4\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210742 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg5f2\" (UniqueName: \"kubernetes.io/projected/e7306bf5-ea9b-4121-a0b0-af1603503993-kube-api-access-lg5f2\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210797 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfjzv\" (UniqueName: \"kubernetes.io/projected/107d7545-41aa-471a-86e9-aa7e557f8faa-kube-api-access-jfjzv\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210824 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data-custom\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210843 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210868 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-combined-ca-bundle\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210949 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.210966 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-combined-ca-bundle\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.211010 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data-custom\") pod 
\"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.218034 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data-custom\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.224334 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-combined-ca-bundle\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.225180 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.241284 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg5f2\" (UniqueName: \"kubernetes.io/projected/e7306bf5-ea9b-4121-a0b0-af1603503993-kube-api-access-lg5f2\") pod \"heat-cfnapi-6949b76cf4-kw8lk\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.315626 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-combined-ca-bundle\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.315742 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data-custom\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.315798 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfjzv\" (UniqueName: \"kubernetes.io/projected/107d7545-41aa-471a-86e9-aa7e557f8faa-kube-api-access-jfjzv\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.315827 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.321533 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-combined-ca-bundle\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " 
pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.323586 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.326284 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data-custom\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.345712 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfjzv\" (UniqueName: \"kubernetes.io/projected/107d7545-41aa-471a-86e9-aa7e557f8faa-kube-api-access-jfjzv\") pod \"heat-api-c5884cff9-92mmk\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.363770 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.383510 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.428807 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.610788 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.708947 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6c8878cd54-q8bdj"] Sep 30 00:30:41 crc kubenswrapper[4809]: I0930 00:30:41.967394 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6949b76cf4-kw8lk"] Sep 30 00:30:42 crc kubenswrapper[4809]: W0930 00:30:42.001290 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7306bf5_ea9b_4121_a0b0_af1603503993.slice/crio-69146bdea159355074245d7f4a5d9d2e5c1d0298bda123c4b6254db14e18b767 WatchSource:0}: Error finding container 69146bdea159355074245d7f4a5d9d2e5c1d0298bda123c4b6254db14e18b767: Status 404 returned error can't find the container with id 69146bdea159355074245d7f4a5d9d2e5c1d0298bda123c4b6254db14e18b767 Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.077119 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f6bc4c6c9-r57b4"] Sep 30 00:30:42 crc kubenswrapper[4809]: W0930 00:30:42.092317 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf585e9ed_4d32_49f6_84af_87ba0d4093d9.slice/crio-a71a857249ad0fb47eb3aa5c7e6039b700e4741c895328ad8ffe6620b931a07f WatchSource:0}: Error finding container a71a857249ad0fb47eb3aa5c7e6039b700e4741c895328ad8ffe6620b931a07f: Status 404 returned error can't find the container with id a71a857249ad0fb47eb3aa5c7e6039b700e4741c895328ad8ffe6620b931a07f Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.112030 4809 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c5884cff9-92mmk"] Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.380890 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" event={"ID":"e7306bf5-ea9b-4121-a0b0-af1603503993","Type":"ContainerStarted","Data":"69146bdea159355074245d7f4a5d9d2e5c1d0298bda123c4b6254db14e18b767"} Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.383381 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6c8878cd54-q8bdj" event={"ID":"30ff7679-5c63-4bb2-a427-90410586d459","Type":"ContainerStarted","Data":"5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be"} Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.383490 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6c8878cd54-q8bdj" event={"ID":"30ff7679-5c63-4bb2-a427-90410586d459","Type":"ContainerStarted","Data":"3fb5a0dd8720e4491057fec1ecea10a9454198b8f6e120fd18c7142c25a3f38b"} Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.384166 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.390108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" event={"ID":"f585e9ed-4d32-49f6-84af-87ba0d4093d9","Type":"ContainerStarted","Data":"a71a857249ad0fb47eb3aa5c7e6039b700e4741c895328ad8ffe6620b931a07f"} Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.392098 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5884cff9-92mmk" event={"ID":"107d7545-41aa-471a-86e9-aa7e557f8faa","Type":"ContainerStarted","Data":"323501b65d68f1c3832687f1fe9ac1445c14e0b4367cac6963202e0015d93214"} Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.422497 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6c8878cd54-q8bdj" podStartSLOduration=2.422472916 podStartE2EDuration="2.422472916s" podCreationTimestamp="2025-09-30 00:30:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:42.415423133 +0000 UTC m=+1293.451672541" watchObservedRunningTime="2025-09-30 00:30:42.422472916 +0000 UTC m=+1293.458722334" Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.632289 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.632629 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-central-agent" containerID="cri-o://79b576470e558a67838f1d9d0c37245031cb3921ada5cd367ecc5556835d59be" gracePeriod=30 Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.633499 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="proxy-httpd" containerID="cri-o://9a8edb52e80b8ccba70418b15f50bb39fdddb2169416de42242c90501a80ca66" gracePeriod=30 Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.633572 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="sg-core" 
containerID="cri-o://b0f6b297d991b755574922aa95c689560b0a91ef36ce142e30db7ca988566e15" gracePeriod=30 Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.633615 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-notification-agent" containerID="cri-o://3ad6813185ae2f11af12622feed0a1aef0044298c4c2250e9a57a37084bf82d3" gracePeriod=30 Sep 30 00:30:42 crc kubenswrapper[4809]: I0930 00:30:42.643178 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.406554 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5588ead-01ab-4225-bdf0-17270bff107a" containerID="9a8edb52e80b8ccba70418b15f50bb39fdddb2169416de42242c90501a80ca66" exitCode=0 Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.406783 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5588ead-01ab-4225-bdf0-17270bff107a" containerID="b0f6b297d991b755574922aa95c689560b0a91ef36ce142e30db7ca988566e15" exitCode=2 Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.406811 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5588ead-01ab-4225-bdf0-17270bff107a" containerID="79b576470e558a67838f1d9d0c37245031cb3921ada5cd367ecc5556835d59be" exitCode=0 Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.406852 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerDied","Data":"9a8edb52e80b8ccba70418b15f50bb39fdddb2169416de42242c90501a80ca66"} Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.406876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerDied","Data":"b0f6b297d991b755574922aa95c689560b0a91ef36ce142e30db7ca988566e15"} Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.406898 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerDied","Data":"79b576470e558a67838f1d9d0c37245031cb3921ada5cd367ecc5556835d59be"} Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.408762 4809 generic.go:334] "Generic (PLEG): container finished" podID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerID="282071e58843d368f5fc2832bb70e1bdaf78a98cda8e656caab4a77656287b8b" exitCode=0 Sep 30 00:30:43 crc kubenswrapper[4809]: I0930 00:30:43.408962 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" event={"ID":"f585e9ed-4d32-49f6-84af-87ba0d4093d9","Type":"ContainerDied","Data":"282071e58843d368f5fc2832bb70e1bdaf78a98cda8e656caab4a77656287b8b"} Sep 30 00:30:44 crc kubenswrapper[4809]: I0930 00:30:44.422323 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" event={"ID":"f585e9ed-4d32-49f6-84af-87ba0d4093d9","Type":"ContainerStarted","Data":"c7e4488d8ae93abb023004d909c95e94370565ae43d03fd660092d4e7c823dcd"} Sep 30 00:30:44 crc kubenswrapper[4809]: I0930 00:30:44.422774 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:44 crc kubenswrapper[4809]: I0930 00:30:44.447431 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" podStartSLOduration=4.447412215 
podStartE2EDuration="4.447412215s" podCreationTimestamp="2025-09-30 00:30:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:44.444606048 +0000 UTC m=+1295.480855466" watchObservedRunningTime="2025-09-30 00:30:44.447412215 +0000 UTC m=+1295.483661623" Sep 30 00:30:45 crc kubenswrapper[4809]: I0930 00:30:45.071653 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.186:3000/\": dial tcp 10.217.0.186:3000: connect: connection refused" Sep 30 00:30:46 crc kubenswrapper[4809]: I0930 00:30:46.457200 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5588ead-01ab-4225-bdf0-17270bff107a" containerID="3ad6813185ae2f11af12622feed0a1aef0044298c4c2250e9a57a37084bf82d3" exitCode=0 Sep 30 00:30:46 crc kubenswrapper[4809]: I0930 00:30:46.457525 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerDied","Data":"3ad6813185ae2f11af12622feed0a1aef0044298c4c2250e9a57a37084bf82d3"} Sep 30 00:30:46 crc kubenswrapper[4809]: I0930 00:30:46.942460 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.201745 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-76dd7988df-8d5vk"] Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.205230 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.208739 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.214241 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.214438 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.283295 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-76dd7988df-8d5vk"] Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.306922 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f29e497-2911-4dc0-8a6d-b59c8d254f60-run-httpd\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.306973 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f29e497-2911-4dc0-8a6d-b59c8d254f60-log-httpd\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.307018 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0f29e497-2911-4dc0-8a6d-b59c8d254f60-etc-swift\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: 
\"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.307035 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-internal-tls-certs\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.307106 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-combined-ca-bundle\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.307139 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-config-data\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.307280 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-public-tls-certs\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.307327 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9d5b\" (UniqueName: \"kubernetes.io/projected/0f29e497-2911-4dc0-8a6d-b59c8d254f60-kube-api-access-c9d5b\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.414796 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f29e497-2911-4dc0-8a6d-b59c8d254f60-run-httpd\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.414859 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f29e497-2911-4dc0-8a6d-b59c8d254f60-log-httpd\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.414894 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0f29e497-2911-4dc0-8a6d-b59c8d254f60-etc-swift\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.414917 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-internal-tls-certs\") pod 
\"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.414964 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-combined-ca-bundle\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.414992 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-config-data\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.415086 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-public-tls-certs\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.415130 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9d5b\" (UniqueName: \"kubernetes.io/projected/0f29e497-2911-4dc0-8a6d-b59c8d254f60-kube-api-access-c9d5b\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.417330 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f29e497-2911-4dc0-8a6d-b59c8d254f60-run-httpd\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.418331 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f29e497-2911-4dc0-8a6d-b59c8d254f60-log-httpd\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.424017 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-config-data\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.424798 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-public-tls-certs\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.427219 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-combined-ca-bundle\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " 
pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.432964 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0f29e497-2911-4dc0-8a6d-b59c8d254f60-etc-swift\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.433609 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9d5b\" (UniqueName: \"kubernetes.io/projected/0f29e497-2911-4dc0-8a6d-b59c8d254f60-kube-api-access-c9d5b\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.437349 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f29e497-2911-4dc0-8a6d-b59c8d254f60-internal-tls-certs\") pod \"swift-proxy-76dd7988df-8d5vk\" (UID: \"0f29e497-2911-4dc0-8a6d-b59c8d254f60\") " pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:47 crc kubenswrapper[4809]: I0930 00:30:47.551954 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.722278 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-7c4c5654b8-svs85"] Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.723541 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.756079 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-7cc584f69d-tt2h7"] Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.757577 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.764231 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7c4c5654b8-svs85"] Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.777103 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-84cd7bbbc9-7cs5m"] Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.778383 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.788772 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-84cd7bbbc9-7cs5m"] Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.818630 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7cc584f69d-tt2h7"] Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.842676 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrzr8\" (UniqueName: \"kubernetes.io/projected/e8a858b6-8b7c-4a4d-805f-d129492e0f57-kube-api-access-lrzr8\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.842896 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-combined-ca-bundle\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.842924 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data-custom\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.842965 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944259 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data-custom\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944318 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944349 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944424 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrzr8\" (UniqueName: \"kubernetes.io/projected/e8a858b6-8b7c-4a4d-805f-d129492e0f57-kube-api-access-lrzr8\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: 
\"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944442 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-combined-ca-bundle\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944688 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data-custom\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944749 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-combined-ca-bundle\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.944852 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czlrc\" (UniqueName: \"kubernetes.io/projected/9bc06dad-a168-4690-ab0d-5c14b05ef072-kube-api-access-czlrc\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.945013 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhmcx\" (UniqueName: \"kubernetes.io/projected/37e852d4-1ac6-4168-b9c5-1a3cab13a676-kube-api-access-fhmcx\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.945048 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data-custom\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.945147 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.945309 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-combined-ca-bundle\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.950377 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data-custom\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.951925 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.960326 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-combined-ca-bundle\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:48 crc kubenswrapper[4809]: I0930 00:30:48.961176 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrzr8\" (UniqueName: \"kubernetes.io/projected/e8a858b6-8b7c-4a4d-805f-d129492e0f57-kube-api-access-lrzr8\") pod \"heat-engine-7c4c5654b8-svs85\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.047600 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-combined-ca-bundle\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.047808 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data-custom\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.047864 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-combined-ca-bundle\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.048963 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czlrc\" (UniqueName: \"kubernetes.io/projected/9bc06dad-a168-4690-ab0d-5c14b05ef072-kube-api-access-czlrc\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.049235 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhmcx\" (UniqueName: \"kubernetes.io/projected/37e852d4-1ac6-4168-b9c5-1a3cab13a676-kube-api-access-fhmcx\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.049308 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data-custom\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.049405 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.049691 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.051446 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-combined-ca-bundle\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.051957 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-combined-ca-bundle\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.052267 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.055590 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data-custom\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.055725 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.057929 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data-custom\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.062941 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.066316 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhmcx\" (UniqueName: \"kubernetes.io/projected/37e852d4-1ac6-4168-b9c5-1a3cab13a676-kube-api-access-fhmcx\") pod \"heat-api-84cd7bbbc9-7cs5m\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.068216 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czlrc\" (UniqueName: \"kubernetes.io/projected/9bc06dad-a168-4690-ab0d-5c14b05ef072-kube-api-access-czlrc\") pod \"heat-cfnapi-7cc584f69d-tt2h7\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.100990 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:49 crc kubenswrapper[4809]: I0930 00:30:49.116033 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.416652 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-c5884cff9-92mmk"] Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.429708 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-6949b76cf4-kw8lk"] Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.447048 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-c5d74679-msnb4"] Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.448460 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.451242 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-internal-svc" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.451449 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-public-svc" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.463293 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c5d74679-msnb4"] Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.471351 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-75b57bc64b-sfpzl"] Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.474340 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.478011 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-public-svc" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.478304 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-internal-svc" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.517406 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-75b57bc64b-sfpzl"] Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.582816 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-combined-ca-bundle\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.582899 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj4px\" (UniqueName: \"kubernetes.io/projected/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-kube-api-access-bj4px\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.582982 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data-custom\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583053 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data-custom\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583077 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-public-tls-certs\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583114 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-internal-tls-certs\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583143 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583173 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-public-tls-certs\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583203 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-internal-tls-certs\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583243 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-combined-ca-bundle\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.583277 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.587681 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlkh4\" (UniqueName: \"kubernetes.io/projected/b84f683b-2b54-4f11-a13a-104543f646a3-kube-api-access-zlkh4\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689539 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-internal-tls-certs\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689580 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 
00:30:50.689619 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-public-tls-certs\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689662 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-internal-tls-certs\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689694 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-combined-ca-bundle\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689741 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689762 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlkh4\" (UniqueName: \"kubernetes.io/projected/b84f683b-2b54-4f11-a13a-104543f646a3-kube-api-access-zlkh4\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689790 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-combined-ca-bundle\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689831 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj4px\" (UniqueName: \"kubernetes.io/projected/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-kube-api-access-bj4px\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689902 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data-custom\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689943 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data-custom\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.689980 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-public-tls-certs\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.698547 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-combined-ca-bundle\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.698792 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-combined-ca-bundle\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.698954 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.699404 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-internal-tls-certs\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.700538 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-public-tls-certs\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.701445 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data-custom\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.704457 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-public-tls-certs\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.704878 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.705834 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-internal-tls-certs\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.717462 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data-custom\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.722013 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlkh4\" (UniqueName: \"kubernetes.io/projected/b84f683b-2b54-4f11-a13a-104543f646a3-kube-api-access-zlkh4\") pod \"heat-api-c5d74679-msnb4\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.727676 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj4px\" (UniqueName: \"kubernetes.io/projected/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-kube-api-access-bj4px\") pod \"heat-cfnapi-75b57bc64b-sfpzl\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.809194 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:50 crc kubenswrapper[4809]: I0930 00:30:50.826997 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:51 crc kubenswrapper[4809]: I0930 00:30:51.235796 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:30:51 crc kubenswrapper[4809]: I0930 00:30:51.365853 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:30:51 crc kubenswrapper[4809]: I0930 00:30:51.427870 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-6xtp8"] Sep 30 00:30:51 crc kubenswrapper[4809]: I0930 00:30:51.428082 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerName="dnsmasq-dns" containerID="cri-o://8aa35f0f32d41c49084edbbb21b99149fb7269921b35410be0ddb251bedcc687" gracePeriod=10 Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.273206 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-bh5sc"] Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.278398 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.295803 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-bh5sc"] Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.346869 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-pkq57"] Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.348634 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.374636 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-pkq57"] Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.426613 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k54mj\" (UniqueName: \"kubernetes.io/projected/97460645-6cce-4b0f-b2a2-d80caee414cd-kube-api-access-k54mj\") pod \"nova-api-db-create-bh5sc\" (UID: \"97460645-6cce-4b0f-b2a2-d80caee414cd\") " pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.450048 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-96wlx"] Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.452725 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.455278 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-96wlx"] Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.528528 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvkh6\" (UniqueName: \"kubernetes.io/projected/9152f576-abed-4cb6-97ee-3a4efe2ba97f-kube-api-access-rvkh6\") pod \"nova-cell0-db-create-pkq57\" (UID: \"9152f576-abed-4cb6-97ee-3a4efe2ba97f\") " pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.528726 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k54mj\" (UniqueName: \"kubernetes.io/projected/97460645-6cce-4b0f-b2a2-d80caee414cd-kube-api-access-k54mj\") pod \"nova-api-db-create-bh5sc\" (UID: \"97460645-6cce-4b0f-b2a2-d80caee414cd\") " pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.553279 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k54mj\" (UniqueName: \"kubernetes.io/projected/97460645-6cce-4b0f-b2a2-d80caee414cd-kube-api-access-k54mj\") pod \"nova-api-db-create-bh5sc\" (UID: \"97460645-6cce-4b0f-b2a2-d80caee414cd\") " pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.564832 4809 generic.go:334] "Generic (PLEG): container finished" podID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerID="8aa35f0f32d41c49084edbbb21b99149fb7269921b35410be0ddb251bedcc687" exitCode=0 Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.564879 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" event={"ID":"c6150622-6dfa-40f4-8d82-13e29dbcae08","Type":"ContainerDied","Data":"8aa35f0f32d41c49084edbbb21b99149fb7269921b35410be0ddb251bedcc687"} Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.632383 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvkh6\" (UniqueName: \"kubernetes.io/projected/9152f576-abed-4cb6-97ee-3a4efe2ba97f-kube-api-access-rvkh6\") pod \"nova-cell0-db-create-pkq57\" (UID: \"9152f576-abed-4cb6-97ee-3a4efe2ba97f\") " pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.636249 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.636746 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zs8m\" (UniqueName: \"kubernetes.io/projected/8de17215-f27c-4eab-a6f5-0ad924367b11-kube-api-access-4zs8m\") pod \"nova-cell1-db-create-96wlx\" (UID: \"8de17215-f27c-4eab-a6f5-0ad924367b11\") " pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.674969 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvkh6\" (UniqueName: \"kubernetes.io/projected/9152f576-abed-4cb6-97ee-3a4efe2ba97f-kube-api-access-rvkh6\") pod \"nova-cell0-db-create-pkq57\" (UID: \"9152f576-abed-4cb6-97ee-3a4efe2ba97f\") " pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.738704 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-sb\") pod \"c6150622-6dfa-40f4-8d82-13e29dbcae08\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.738924 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-nb\") pod \"c6150622-6dfa-40f4-8d82-13e29dbcae08\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.738950 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-swift-storage-0\") pod \"c6150622-6dfa-40f4-8d82-13e29dbcae08\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.738982 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd56p\" (UniqueName: \"kubernetes.io/projected/c6150622-6dfa-40f4-8d82-13e29dbcae08-kube-api-access-jd56p\") pod \"c6150622-6dfa-40f4-8d82-13e29dbcae08\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.739011 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-svc\") pod \"c6150622-6dfa-40f4-8d82-13e29dbcae08\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.739099 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-config\") pod \"c6150622-6dfa-40f4-8d82-13e29dbcae08\" (UID: \"c6150622-6dfa-40f4-8d82-13e29dbcae08\") " Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.739635 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zs8m\" (UniqueName: \"kubernetes.io/projected/8de17215-f27c-4eab-a6f5-0ad924367b11-kube-api-access-4zs8m\") pod \"nova-cell1-db-create-96wlx\" (UID: \"8de17215-f27c-4eab-a6f5-0ad924367b11\") " pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.742989 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.760767 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.763999 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6150622-6dfa-40f4-8d82-13e29dbcae08-kube-api-access-jd56p" (OuterVolumeSpecName: "kube-api-access-jd56p") pod "c6150622-6dfa-40f4-8d82-13e29dbcae08" (UID: "c6150622-6dfa-40f4-8d82-13e29dbcae08"). InnerVolumeSpecName "kube-api-access-jd56p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.765780 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zs8m\" (UniqueName: \"kubernetes.io/projected/8de17215-f27c-4eab-a6f5-0ad924367b11-kube-api-access-4zs8m\") pod \"nova-cell1-db-create-96wlx\" (UID: \"8de17215-f27c-4eab-a6f5-0ad924367b11\") " pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.778043 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:52 crc kubenswrapper[4809]: I0930 00:30:52.847719 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd56p\" (UniqueName: \"kubernetes.io/projected/c6150622-6dfa-40f4-8d82-13e29dbcae08-kube-api-access-jd56p\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.058892 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.097108 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c6150622-6dfa-40f4-8d82-13e29dbcae08" (UID: "c6150622-6dfa-40f4-8d82-13e29dbcae08"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.125739 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c6150622-6dfa-40f4-8d82-13e29dbcae08" (UID: "c6150622-6dfa-40f4-8d82-13e29dbcae08"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.137403 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c6150622-6dfa-40f4-8d82-13e29dbcae08" (UID: "c6150622-6dfa-40f4-8d82-13e29dbcae08"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.153355 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c6150622-6dfa-40f4-8d82-13e29dbcae08" (UID: "c6150622-6dfa-40f4-8d82-13e29dbcae08"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.165663 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-config" (OuterVolumeSpecName: "config") pod "c6150622-6dfa-40f4-8d82-13e29dbcae08" (UID: "c6150622-6dfa-40f4-8d82-13e29dbcae08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.167994 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72ljw\" (UniqueName: \"kubernetes.io/projected/a5588ead-01ab-4225-bdf0-17270bff107a-kube-api-access-72ljw\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.168103 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-log-httpd\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.168121 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-sg-core-conf-yaml\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.168186 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-combined-ca-bundle\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.168212 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-run-httpd\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.168255 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-config-data\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.168320 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-scripts\") pod \"a5588ead-01ab-4225-bdf0-17270bff107a\" (UID: \"a5588ead-01ab-4225-bdf0-17270bff107a\") " Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.174800 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.175114 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.175131 4809 
reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.175145 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.175158 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6150622-6dfa-40f4-8d82-13e29dbcae08-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.174872 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.175538 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.190996 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5588ead-01ab-4225-bdf0-17270bff107a-kube-api-access-72ljw" (OuterVolumeSpecName: "kube-api-access-72ljw") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "kube-api-access-72ljw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.198527 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-scripts" (OuterVolumeSpecName: "scripts") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.276878 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.276907 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5588ead-01ab-4225-bdf0-17270bff107a-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.276916 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.276924 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72ljw\" (UniqueName: \"kubernetes.io/projected/a5588ead-01ab-4225-bdf0-17270bff107a-kube-api-access-72ljw\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.377326 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.378843 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.425384 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.483936 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.485798 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c5d74679-msnb4"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.507236 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-84cd7bbbc9-7cs5m"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.520729 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-75b57bc64b-sfpzl"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.569330 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-config-data" (OuterVolumeSpecName: "config-data") pod "a5588ead-01ab-4225-bdf0-17270bff107a" (UID: "a5588ead-01ab-4225-bdf0-17270bff107a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.587306 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5588ead-01ab-4225-bdf0-17270bff107a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.640309 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.640372 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-6xtp8" event={"ID":"c6150622-6dfa-40f4-8d82-13e29dbcae08","Type":"ContainerDied","Data":"22686bcc6b3d5e830d4227822c87becb0b07ffad1fc4d5e2d4a22ce8d023a9f7"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.640454 4809 scope.go:117] "RemoveContainer" containerID="8aa35f0f32d41c49084edbbb21b99149fb7269921b35410be0ddb251bedcc687" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.644966 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" event={"ID":"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42","Type":"ContainerStarted","Data":"93e119bb006e2ea4d012fcda60b2fa58a452fdd94089b43138075f5cd46ab7a6"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.651550 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5d74679-msnb4" event={"ID":"b84f683b-2b54-4f11-a13a-104543f646a3","Type":"ContainerStarted","Data":"03d0600b1e39121f0ebdd8b8e8deca8e635583d2c58e6b145b30515b67ac1b8f"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.668287 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5884cff9-92mmk" event={"ID":"107d7545-41aa-471a-86e9-aa7e557f8faa","Type":"ContainerStarted","Data":"339ee0eb9d2cdb95d4c179b15ee534f418a9351af26be753336c842e5cf078a7"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.668495 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-c5884cff9-92mmk" podUID="107d7545-41aa-471a-86e9-aa7e557f8faa" containerName="heat-api" containerID="cri-o://339ee0eb9d2cdb95d4c179b15ee534f418a9351af26be753336c842e5cf078a7" gracePeriod=60 Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.668860 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.686330 4809 scope.go:117] "RemoveContainer" containerID="0e141b2eb33bb6a66514c833e42f8eab07fd7ec72bfeb1c8fd8ac82b13c0ceaf" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.694941 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" podUID="e7306bf5-ea9b-4121-a0b0-af1603503993" containerName="heat-cfnapi" containerID="cri-o://a59fca5737a622196ceebd6d438eeda454ba0f2ff17ef0c3e5bf3beda9e0b644" gracePeriod=60 Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.697222 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-c5884cff9-92mmk" podStartSLOduration=3.300998493 podStartE2EDuration="13.697211671s" podCreationTimestamp="2025-09-30 00:30:40 +0000 UTC" firstStartedPulling="2025-09-30 00:30:42.142699189 +0000 UTC m=+1293.178948597" lastFinishedPulling="2025-09-30 00:30:52.538912367 +0000 UTC m=+1303.575161775" observedRunningTime="2025-09-30 00:30:53.696003749 +0000 UTC m=+1304.732253157" 
watchObservedRunningTime="2025-09-30 00:30:53.697211671 +0000 UTC m=+1304.733461079" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.819879 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" podStartSLOduration=3.257847998 podStartE2EDuration="13.819861031s" podCreationTimestamp="2025-09-30 00:30:40 +0000 UTC" firstStartedPulling="2025-09-30 00:30:42.004747452 +0000 UTC m=+1293.040996860" lastFinishedPulling="2025-09-30 00:30:52.566760485 +0000 UTC m=+1303.603009893" observedRunningTime="2025-09-30 00:30:53.740775758 +0000 UTC m=+1304.777025166" watchObservedRunningTime="2025-09-30 00:30:53.819861031 +0000 UTC m=+1304.856110439" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825034 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825069 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" event={"ID":"e7306bf5-ea9b-4121-a0b0-af1603503993","Type":"ContainerStarted","Data":"a59fca5737a622196ceebd6d438eeda454ba0f2ff17ef0c3e5bf3beda9e0b644"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825094 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84cd7bbbc9-7cs5m" event={"ID":"37e852d4-1ac6-4168-b9c5-1a3cab13a676","Type":"ContainerStarted","Data":"523d2344771897b555d6eaa93827a499b9ad32e6951eb5fc3e4f108c7e4935f8"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825518 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825552 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-6xtp8"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825567 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"96d0165f-2a62-4c67-b140-0073b5ef59a0","Type":"ContainerStarted","Data":"b61ad7998fdb7e3c28c1b135243d9900686e83c351c46a4e9775377f0c8d1482"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825593 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5588ead-01ab-4225-bdf0-17270bff107a","Type":"ContainerDied","Data":"8ac0d15adae056e478a7c5cf6da435151d4fe375f99d9397c0d8d144d4617438"} Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.825612 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-6xtp8"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.837792 4809 scope.go:117] "RemoveContainer" containerID="9a8edb52e80b8ccba70418b15f50bb39fdddb2169416de42242c90501a80ca66" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.872916 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7c4c5654b8-svs85"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.902940 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-7cc584f69d-tt2h7"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.922176 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.641276447 podStartE2EDuration="16.922158286s" podCreationTimestamp="2025-09-30 00:30:37 +0000 UTC" firstStartedPulling="2025-09-30 00:30:38.279859852 +0000 UTC m=+1289.316109260" lastFinishedPulling="2025-09-30 00:30:52.560741691 +0000 UTC 
m=+1303.596991099" observedRunningTime="2025-09-30 00:30:53.824356333 +0000 UTC m=+1304.860605761" watchObservedRunningTime="2025-09-30 00:30:53.922158286 +0000 UTC m=+1304.958407694" Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.969215 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:53 crc kubenswrapper[4809]: I0930 00:30:53.977860 4809 scope.go:117] "RemoveContainer" containerID="b0f6b297d991b755574922aa95c689560b0a91ef36ce142e30db7ca988566e15" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.028583 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.092693 4809 scope.go:117] "RemoveContainer" containerID="3ad6813185ae2f11af12622feed0a1aef0044298c4c2250e9a57a37084bf82d3" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.095804 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-76dd7988df-8d5vk"] Sep 30 00:30:54 crc kubenswrapper[4809]: W0930 00:30:54.105690 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9152f576_abed_4cb6_97ee_3a4efe2ba97f.slice/crio-c52c88a342f3595466fa8fc1885fa469f64ae7745e2082103befffa0fed24367 WatchSource:0}: Error finding container c52c88a342f3595466fa8fc1885fa469f64ae7745e2082103befffa0fed24367: Status 404 returned error can't find the container with id c52c88a342f3595466fa8fc1885fa469f64ae7745e2082103befffa0fed24367 Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.121750 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:54 crc kubenswrapper[4809]: E0930 00:30:54.122309 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="sg-core" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.122330 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="sg-core" Sep 30 00:30:54 crc kubenswrapper[4809]: E0930 00:30:54.122348 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerName="dnsmasq-dns" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.122354 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerName="dnsmasq-dns" Sep 30 00:30:54 crc kubenswrapper[4809]: E0930 00:30:54.122372 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerName="init" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.122378 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerName="init" Sep 30 00:30:54 crc kubenswrapper[4809]: E0930 00:30:54.122415 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-notification-agent" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.122424 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-notification-agent" Sep 30 00:30:54 crc kubenswrapper[4809]: E0930 00:30:54.122432 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-central-agent" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.122438 4809 
state_mem.go:107] "Deleted CPUSet assignment" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-central-agent" Sep 30 00:30:54 crc kubenswrapper[4809]: E0930 00:30:54.122449 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="proxy-httpd" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.122455 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="proxy-httpd" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.123160 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="proxy-httpd" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.123216 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" containerName="dnsmasq-dns" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.123234 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-central-agent" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.123243 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="sg-core" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.123256 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" containerName="ceilometer-notification-agent" Sep 30 00:30:54 crc kubenswrapper[4809]: W0930 00:30:54.124067 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97460645_6cce_4b0f_b2a2_d80caee414cd.slice/crio-4b3aa951e8a6f1301983f0a6d619149066533964cd297fffe99dd69d49d09b50 WatchSource:0}: Error finding container 4b3aa951e8a6f1301983f0a6d619149066533964cd297fffe99dd69d49d09b50: Status 404 returned error can't find the container with id 4b3aa951e8a6f1301983f0a6d619149066533964cd297fffe99dd69d49d09b50 Sep 30 00:30:54 crc kubenswrapper[4809]: W0930 00:30:54.124520 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8de17215_f27c_4eab_a6f5_0ad924367b11.slice/crio-085fdc58d0580985a3d722c692c8172bd2a10823e0bdf9c2409da3e4b27946aa WatchSource:0}: Error finding container 085fdc58d0580985a3d722c692c8172bd2a10823e0bdf9c2409da3e4b27946aa: Status 404 returned error can't find the container with id 085fdc58d0580985a3d722c692c8172bd2a10823e0bdf9c2409da3e4b27946aa Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.125830 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.135057 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.138989 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.184867 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.194391 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-pkq57"] Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.210504 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-bh5sc"] Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.236136 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-config-data\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.236457 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.236607 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-run-httpd\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.237229 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-log-httpd\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.237263 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wj8v\" (UniqueName: \"kubernetes.io/projected/30a49cca-a01d-45fb-92a0-6957fa3c61a8-kube-api-access-5wj8v\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.237338 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-scripts\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.237463 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.237626 4809 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-96wlx"] Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.278584 4809 scope.go:117] "RemoveContainer" containerID="79b576470e558a67838f1d9d0c37245031cb3921ada5cd367ecc5556835d59be" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339141 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-run-httpd\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339286 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-log-httpd\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339320 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wj8v\" (UniqueName: \"kubernetes.io/projected/30a49cca-a01d-45fb-92a0-6957fa3c61a8-kube-api-access-5wj8v\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339366 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-scripts\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339395 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339440 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-config-data\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.339488 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.340292 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-run-httpd\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.340417 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-log-httpd\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.346972 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-scripts\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.349094 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-config-data\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.352126 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.358306 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wj8v\" (UniqueName: \"kubernetes.io/projected/30a49cca-a01d-45fb-92a0-6957fa3c61a8-kube-api-access-5wj8v\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.363400 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " pod="openstack/ceilometer-0" Sep 30 00:30:54 crc kubenswrapper[4809]: I0930 00:30:54.633741 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.861448 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7c4c5654b8-svs85" event={"ID":"e8a858b6-8b7c-4a4d-805f-d129492e0f57","Type":"ContainerStarted","Data":"694d2454da6af0628f339c103a664053b9eda9edb903e28f81fb3dc8ba64c001"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.866377 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" event={"ID":"9bc06dad-a168-4690-ab0d-5c14b05ef072","Type":"ContainerStarted","Data":"25e809d98276d7d0adf98b4dff83740e552d2d40912ee8088797b659c1ff197d"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.868052 4809 generic.go:334] "Generic (PLEG): container finished" podID="e7306bf5-ea9b-4121-a0b0-af1603503993" containerID="a59fca5737a622196ceebd6d438eeda454ba0f2ff17ef0c3e5bf3beda9e0b644" exitCode=0 Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.868094 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" event={"ID":"e7306bf5-ea9b-4121-a0b0-af1603503993","Type":"ContainerDied","Data":"a59fca5737a622196ceebd6d438eeda454ba0f2ff17ef0c3e5bf3beda9e0b644"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.878384 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-96wlx" event={"ID":"8de17215-f27c-4eab-a6f5-0ad924367b11","Type":"ContainerStarted","Data":"085fdc58d0580985a3d722c692c8172bd2a10823e0bdf9c2409da3e4b27946aa"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.881101 4809 generic.go:334] "Generic (PLEG): container finished" podID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerID="2ebdeda5f84dc4bfd7a48c8153845b841aa2fd36f4affa8382beec9ff92bef9d" exitCode=137 Sep 30 
00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.881193 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b17561b2-81f8-4de9-817f-6e88f05730a9","Type":"ContainerDied","Data":"2ebdeda5f84dc4bfd7a48c8153845b841aa2fd36f4affa8382beec9ff92bef9d"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.882198 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh5sc" event={"ID":"97460645-6cce-4b0f-b2a2-d80caee414cd","Type":"ContainerStarted","Data":"4b3aa951e8a6f1301983f0a6d619149066533964cd297fffe99dd69d49d09b50"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.894906 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5d74679-msnb4" event={"ID":"b84f683b-2b54-4f11-a13a-104543f646a3","Type":"ContainerStarted","Data":"15c9ed5312385bab468671de140eaa891467369fef5ecfb341ed0f0bbbf52552"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.896760 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.899288 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pkq57" event={"ID":"9152f576-abed-4cb6-97ee-3a4efe2ba97f","Type":"ContainerStarted","Data":"c52c88a342f3595466fa8fc1885fa469f64ae7745e2082103befffa0fed24367"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.903006 4809 generic.go:334] "Generic (PLEG): container finished" podID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerID="66a60c5156e2a548d74a14b1283846c5a7b84688ce0cbb86c2151a9d04f3ecdf" exitCode=1 Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.903104 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84cd7bbbc9-7cs5m" event={"ID":"37e852d4-1ac6-4168-b9c5-1a3cab13a676","Type":"ContainerDied","Data":"66a60c5156e2a548d74a14b1283846c5a7b84688ce0cbb86c2151a9d04f3ecdf"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.940926 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-76dd7988df-8d5vk" event={"ID":"0f29e497-2911-4dc0-8a6d-b59c8d254f60","Type":"ContainerStarted","Data":"56b679fa5a280babfae86e401ad107c96d40a1134e05cff0af1c2f90df27aaf1"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.978421 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-c5d74679-msnb4" podStartSLOduration=4.978399292 podStartE2EDuration="4.978399292s" podCreationTimestamp="2025-09-30 00:30:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:54.941216019 +0000 UTC m=+1305.977465427" watchObservedRunningTime="2025-09-30 00:30:54.978399292 +0000 UTC m=+1306.014648700" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:54.995339 4809 scope.go:117] "RemoveContainer" containerID="66a60c5156e2a548d74a14b1283846c5a7b84688ce0cbb86c2151a9d04f3ecdf" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.096598 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.156354 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.175753 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-scripts\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.175870 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b17561b2-81f8-4de9-817f-6e88f05730a9-logs\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.175901 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data-custom\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.175921 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-combined-ca-bundle\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.175942 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b17561b2-81f8-4de9-817f-6e88f05730a9-etc-machine-id\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.176026 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.176113 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqp6k\" (UniqueName: \"kubernetes.io/projected/b17561b2-81f8-4de9-817f-6e88f05730a9-kube-api-access-vqp6k\") pod \"b17561b2-81f8-4de9-817f-6e88f05730a9\" (UID: \"b17561b2-81f8-4de9-817f-6e88f05730a9\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.176495 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b17561b2-81f8-4de9-817f-6e88f05730a9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.176816 4809 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b17561b2-81f8-4de9-817f-6e88f05730a9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.180427 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b17561b2-81f8-4de9-817f-6e88f05730a9-logs" (OuterVolumeSpecName: "logs") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.210312 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.222255 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b17561b2-81f8-4de9-817f-6e88f05730a9-kube-api-access-vqp6k" (OuterVolumeSpecName: "kube-api-access-vqp6k") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "kube-api-access-vqp6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.250867 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-scripts" (OuterVolumeSpecName: "scripts") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.280765 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-combined-ca-bundle\") pod \"e7306bf5-ea9b-4121-a0b0-af1603503993\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.280835 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data\") pod \"e7306bf5-ea9b-4121-a0b0-af1603503993\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.280928 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg5f2\" (UniqueName: \"kubernetes.io/projected/e7306bf5-ea9b-4121-a0b0-af1603503993-kube-api-access-lg5f2\") pod \"e7306bf5-ea9b-4121-a0b0-af1603503993\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.281096 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data-custom\") pod \"e7306bf5-ea9b-4121-a0b0-af1603503993\" (UID: \"e7306bf5-ea9b-4121-a0b0-af1603503993\") " Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.281535 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b17561b2-81f8-4de9-817f-6e88f05730a9-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.281552 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.281560 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqp6k\" (UniqueName: \"kubernetes.io/projected/b17561b2-81f8-4de9-817f-6e88f05730a9-kube-api-access-vqp6k\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.281569 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.321509 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e7306bf5-ea9b-4121-a0b0-af1603503993" (UID: "e7306bf5-ea9b-4121-a0b0-af1603503993"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.321933 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7306bf5-ea9b-4121-a0b0-af1603503993-kube-api-access-lg5f2" (OuterVolumeSpecName: "kube-api-access-lg5f2") pod "e7306bf5-ea9b-4121-a0b0-af1603503993" (UID: "e7306bf5-ea9b-4121-a0b0-af1603503993"). InnerVolumeSpecName "kube-api-access-lg5f2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.324391 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.324444 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.383755 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.383787 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg5f2\" (UniqueName: \"kubernetes.io/projected/e7306bf5-ea9b-4121-a0b0-af1603503993-kube-api-access-lg5f2\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.447260 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.460950 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7306bf5-ea9b-4121-a0b0-af1603503993" (UID: "e7306bf5-ea9b-4121-a0b0-af1603503993"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.489131 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.489161 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.508577 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.570783 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data" (OuterVolumeSpecName: "config-data") pod "e7306bf5-ea9b-4121-a0b0-af1603503993" (UID: "e7306bf5-ea9b-4121-a0b0-af1603503993"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.598995 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7306bf5-ea9b-4121-a0b0-af1603503993-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.632853 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data" (OuterVolumeSpecName: "config-data") pod "b17561b2-81f8-4de9-817f-6e88f05730a9" (UID: "b17561b2-81f8-4de9-817f-6e88f05730a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.700910 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b17561b2-81f8-4de9-817f-6e88f05730a9-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.714601 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5588ead-01ab-4225-bdf0-17270bff107a" path="/var/lib/kubelet/pods/a5588ead-01ab-4225-bdf0-17270bff107a/volumes" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.715665 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6150622-6dfa-40f4-8d82-13e29dbcae08" path="/var/lib/kubelet/pods/c6150622-6dfa-40f4-8d82-13e29dbcae08/volumes" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.954062 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.956068 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.956081 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6949b76cf4-kw8lk" event={"ID":"e7306bf5-ea9b-4121-a0b0-af1603503993","Type":"ContainerDied","Data":"69146bdea159355074245d7f4a5d9d2e5c1d0298bda123c4b6254db14e18b767"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.956126 4809 scope.go:117] "RemoveContainer" containerID="a59fca5737a622196ceebd6d438eeda454ba0f2ff17ef0c3e5bf3beda9e0b644" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.963172 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-76dd7988df-8d5vk" event={"ID":"0f29e497-2911-4dc0-8a6d-b59c8d254f60","Type":"ContainerStarted","Data":"fceb9343784b244ac7ab8d16af427f94354ee9025181d4be209bd9f7e858e04a"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.963382 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.965767 4809 generic.go:334] "Generic (PLEG): container finished" podID="8de17215-f27c-4eab-a6f5-0ad924367b11" containerID="505d37050ffa3c58311c89ad872b639b21b9eaf4982d541a778c92bae41bbe55" exitCode=0 Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.967456 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-96wlx" event={"ID":"8de17215-f27c-4eab-a6f5-0ad924367b11","Type":"ContainerDied","Data":"505d37050ffa3c58311c89ad872b639b21b9eaf4982d541a778c92bae41bbe55"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.974126 4809 generic.go:334] "Generic (PLEG): container finished" podID="9152f576-abed-4cb6-97ee-3a4efe2ba97f" containerID="57e5168e5b543a436549303cbf394fff2ab5613e0ba9beb7b161a1de79c59663" exitCode=0 Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.974211 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pkq57" event={"ID":"9152f576-abed-4cb6-97ee-3a4efe2ba97f","Type":"ContainerDied","Data":"57e5168e5b543a436549303cbf394fff2ab5613e0ba9beb7b161a1de79c59663"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.976851 4809 generic.go:334] "Generic (PLEG): container finished" podID="97460645-6cce-4b0f-b2a2-d80caee414cd" containerID="607a49a569b789b5f538ded9881802b35ad84bafd4c38d0c84fb4d0ed10f3fb9" exitCode=0 Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.976916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh5sc" event={"ID":"97460645-6cce-4b0f-b2a2-d80caee414cd","Type":"ContainerDied","Data":"607a49a569b789b5f538ded9881802b35ad84bafd4c38d0c84fb4d0ed10f3fb9"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.984028 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerStarted","Data":"5c9816cc90a035f0731ac2f290dd00d65f1938d0084f9f9dacb7d43f1946618a"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.988335 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-84cd7bbbc9-7cs5m" podStartSLOduration=7.9883160669999995 podStartE2EDuration="7.988316067s" podCreationTimestamp="2025-09-30 00:30:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:55.979002374 +0000 UTC m=+1307.015251782" 
watchObservedRunningTime="2025-09-30 00:30:55.988316067 +0000 UTC m=+1307.024565475" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.992767 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" event={"ID":"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42","Type":"ContainerStarted","Data":"28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.992885 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.996249 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b17561b2-81f8-4de9-817f-6e88f05730a9","Type":"ContainerDied","Data":"e0eb1d7e32a3182e20ba65922ba45ee9d9f904e8fb5d98002e708095533f2558"} Sep 30 00:30:55 crc kubenswrapper[4809]: I0930 00:30:55.996266 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.002281 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7c4c5654b8-svs85" event={"ID":"e8a858b6-8b7c-4a4d-805f-d129492e0f57","Type":"ContainerStarted","Data":"0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1"} Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.002372 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.006918 4809 generic.go:334] "Generic (PLEG): container finished" podID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerID="2a3ac77883e1e35888fd5a066e12fa1c807b1737b4f41fe38fcde75ad8297790" exitCode=1 Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.006982 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" event={"ID":"9bc06dad-a168-4690-ab0d-5c14b05ef072","Type":"ContainerDied","Data":"2a3ac77883e1e35888fd5a066e12fa1c807b1737b4f41fe38fcde75ad8297790"} Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.007307 4809 scope.go:117] "RemoveContainer" containerID="2a3ac77883e1e35888fd5a066e12fa1c807b1737b4f41fe38fcde75ad8297790" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.019704 4809 scope.go:117] "RemoveContainer" containerID="2ebdeda5f84dc4bfd7a48c8153845b841aa2fd36f4affa8382beec9ff92bef9d" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.071837 4809 scope.go:117] "RemoveContainer" containerID="ca59482742bda5cf04d6795e8bce21b07a0e8f69b686a1363dbc7e7e6f421349" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.080248 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-76dd7988df-8d5vk" podStartSLOduration=9.080224029 podStartE2EDuration="9.080224029s" podCreationTimestamp="2025-09-30 00:30:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:56.035197564 +0000 UTC m=+1307.071446982" watchObservedRunningTime="2025-09-30 00:30:56.080224029 +0000 UTC m=+1307.116473437" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.145752 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" podStartSLOduration=6.145731163 podStartE2EDuration="6.145731163s" podCreationTimestamp="2025-09-30 00:30:50 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:56.072136809 +0000 UTC m=+1307.108386227" watchObservedRunningTime="2025-09-30 00:30:56.145731163 +0000 UTC m=+1307.181980571" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.181369 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.225402 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-7c4c5654b8-svs85" podStartSLOduration=8.22532938 podStartE2EDuration="8.22532938s" podCreationTimestamp="2025-09-30 00:30:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:56.132439201 +0000 UTC m=+1307.168688609" watchObservedRunningTime="2025-09-30 00:30:56.22532938 +0000 UTC m=+1307.261578798" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.229776 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.256949 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:56 crc kubenswrapper[4809]: E0930 00:30:56.257359 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api-log" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.257373 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api-log" Sep 30 00:30:56 crc kubenswrapper[4809]: E0930 00:30:56.257397 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7306bf5-ea9b-4121-a0b0-af1603503993" containerName="heat-cfnapi" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.257404 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7306bf5-ea9b-4121-a0b0-af1603503993" containerName="heat-cfnapi" Sep 30 00:30:56 crc kubenswrapper[4809]: E0930 00:30:56.257443 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.257455 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.257687 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api-log" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.257705 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" containerName="cinder-api" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.257727 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7306bf5-ea9b-4121-a0b0-af1603503993" containerName="heat-cfnapi" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.259281 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.267066 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.267292 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.267430 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.272669 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.281772 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-6949b76cf4-kw8lk"] Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.294411 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-6949b76cf4-kw8lk"] Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.333542 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-config-data-custom\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.333801 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965477a4-d575-4c80-826b-5ac22f3bfee3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.333921 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.334006 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-scripts\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.334178 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.334256 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-config-data\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.334324 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-internal-tls-certs\") 
pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.334448 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckn9z\" (UniqueName: \"kubernetes.io/projected/965477a4-d575-4c80-826b-5ac22f3bfee3-kube-api-access-ckn9z\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.334534 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965477a4-d575-4c80-826b-5ac22f3bfee3-logs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.436087 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965477a4-d575-4c80-826b-5ac22f3bfee3-logs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.436183 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-config-data-custom\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.436316 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965477a4-d575-4c80-826b-5ac22f3bfee3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.436788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.437038 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-scripts\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.437095 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.437150 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-config-data\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.437167 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.437244 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckn9z\" (UniqueName: \"kubernetes.io/projected/965477a4-d575-4c80-826b-5ac22f3bfee3-kube-api-access-ckn9z\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.436408 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965477a4-d575-4c80-826b-5ac22f3bfee3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.436617 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/965477a4-d575-4c80-826b-5ac22f3bfee3-logs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.439785 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-config-data-custom\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.443354 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-scripts\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.444163 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.444797 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.446207 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.450578 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965477a4-d575-4c80-826b-5ac22f3bfee3-config-data\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.470872 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckn9z\" (UniqueName: 
\"kubernetes.io/projected/965477a4-d575-4c80-826b-5ac22f3bfee3-kube-api-access-ckn9z\") pod \"cinder-api-0\" (UID: \"965477a4-d575-4c80-826b-5ac22f3bfee3\") " pod="openstack/cinder-api-0" Sep 30 00:30:56 crc kubenswrapper[4809]: I0930 00:30:56.698947 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.021810 4809 generic.go:334] "Generic (PLEG): container finished" podID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerID="c8125d5b6d309ac0e059d2630cdc8fa69c5abc118dd223d500ae49248184444b" exitCode=1 Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.021900 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" event={"ID":"9bc06dad-a168-4690-ab0d-5c14b05ef072","Type":"ContainerDied","Data":"c8125d5b6d309ac0e059d2630cdc8fa69c5abc118dd223d500ae49248184444b"} Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.022311 4809 scope.go:117] "RemoveContainer" containerID="2a3ac77883e1e35888fd5a066e12fa1c807b1737b4f41fe38fcde75ad8297790" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.022389 4809 scope.go:117] "RemoveContainer" containerID="c8125d5b6d309ac0e059d2630cdc8fa69c5abc118dd223d500ae49248184444b" Sep 30 00:30:57 crc kubenswrapper[4809]: E0930 00:30:57.022706 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-7cc584f69d-tt2h7_openstack(9bc06dad-a168-4690-ab0d-5c14b05ef072)\"" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.027222 4809 generic.go:334] "Generic (PLEG): container finished" podID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerID="1beb5e23176fd0bf97f216ee81ebd3a771c6ab6d654f17030bbea4ced6dd2d55" exitCode=1 Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.027321 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84cd7bbbc9-7cs5m" event={"ID":"37e852d4-1ac6-4168-b9c5-1a3cab13a676","Type":"ContainerDied","Data":"1beb5e23176fd0bf97f216ee81ebd3a771c6ab6d654f17030bbea4ced6dd2d55"} Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.027869 4809 scope.go:117] "RemoveContainer" containerID="1beb5e23176fd0bf97f216ee81ebd3a771c6ab6d654f17030bbea4ced6dd2d55" Sep 30 00:30:57 crc kubenswrapper[4809]: E0930 00:30:57.028147 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-84cd7bbbc9-7cs5m_openstack(37e852d4-1ac6-4168-b9c5-1a3cab13a676)\"" pod="openstack/heat-api-84cd7bbbc9-7cs5m" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.047264 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerStarted","Data":"75f909ea76fd2521468cb8373fe50fdfbeee4cd31af8f517d3e96e2f70a4ded0"} Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.063781 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-76dd7988df-8d5vk" event={"ID":"0f29e497-2911-4dc0-8a6d-b59c8d254f60","Type":"ContainerStarted","Data":"a9154f5a95657429ce36f6073813723e832d2b6751b7c1ef7845504ea24eb59d"} Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.063911 4809 
scope.go:117] "RemoveContainer" containerID="66a60c5156e2a548d74a14b1283846c5a7b84688ce0cbb86c2151a9d04f3ecdf" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.064023 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.185360 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.704972 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b17561b2-81f8-4de9-817f-6e88f05730a9" path="/var/lib/kubelet/pods/b17561b2-81f8-4de9-817f-6e88f05730a9/volumes" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.709493 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7306bf5-ea9b-4121-a0b0-af1603503993" path="/var/lib/kubelet/pods/e7306bf5-ea9b-4121-a0b0-af1603503993/volumes" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.872392 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:57 crc kubenswrapper[4809]: I0930 00:30:57.992779 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k54mj\" (UniqueName: \"kubernetes.io/projected/97460645-6cce-4b0f-b2a2-d80caee414cd-kube-api-access-k54mj\") pod \"97460645-6cce-4b0f-b2a2-d80caee414cd\" (UID: \"97460645-6cce-4b0f-b2a2-d80caee414cd\") " Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.012903 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97460645-6cce-4b0f-b2a2-d80caee414cd-kube-api-access-k54mj" (OuterVolumeSpecName: "kube-api-access-k54mj") pod "97460645-6cce-4b0f-b2a2-d80caee414cd" (UID: "97460645-6cce-4b0f-b2a2-d80caee414cd"). InnerVolumeSpecName "kube-api-access-k54mj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.098053 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k54mj\" (UniqueName: \"kubernetes.io/projected/97460645-6cce-4b0f-b2a2-d80caee414cd-kube-api-access-k54mj\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.147405 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965477a4-d575-4c80-826b-5ac22f3bfee3","Type":"ContainerStarted","Data":"b7e20cf1367a7c72979b72e4a2830258097aa129048725a37ce067208107b51e"} Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.155557 4809 scope.go:117] "RemoveContainer" containerID="c8125d5b6d309ac0e059d2630cdc8fa69c5abc118dd223d500ae49248184444b" Sep 30 00:30:58 crc kubenswrapper[4809]: E0930 00:30:58.155889 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-7cc584f69d-tt2h7_openstack(9bc06dad-a168-4690-ab0d-5c14b05ef072)\"" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.177454 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.178245 4809 scope.go:117] "RemoveContainer" containerID="1beb5e23176fd0bf97f216ee81ebd3a771c6ab6d654f17030bbea4ced6dd2d55" Sep 30 00:30:58 crc kubenswrapper[4809]: E0930 00:30:58.178493 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-84cd7bbbc9-7cs5m_openstack(37e852d4-1ac6-4168-b9c5-1a3cab13a676)\"" pod="openstack/heat-api-84cd7bbbc9-7cs5m" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.179545 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh5sc" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.179563 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh5sc" event={"ID":"97460645-6cce-4b0f-b2a2-d80caee414cd","Type":"ContainerDied","Data":"4b3aa951e8a6f1301983f0a6d619149066533964cd297fffe99dd69d49d09b50"} Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.179606 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b3aa951e8a6f1301983f0a6d619149066533964cd297fffe99dd69d49d09b50" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.206504 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerStarted","Data":"9ae4c4c6a62a9a2f10bc821ef242ba1125325d99df63b0be12288fe453692807"} Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.306427 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zs8m\" (UniqueName: \"kubernetes.io/projected/8de17215-f27c-4eab-a6f5-0ad924367b11-kube-api-access-4zs8m\") pod \"8de17215-f27c-4eab-a6f5-0ad924367b11\" (UID: \"8de17215-f27c-4eab-a6f5-0ad924367b11\") " Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.314911 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8de17215-f27c-4eab-a6f5-0ad924367b11-kube-api-access-4zs8m" (OuterVolumeSpecName: "kube-api-access-4zs8m") pod "8de17215-f27c-4eab-a6f5-0ad924367b11" (UID: "8de17215-f27c-4eab-a6f5-0ad924367b11"). InnerVolumeSpecName "kube-api-access-4zs8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.387028 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.410784 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zs8m\" (UniqueName: \"kubernetes.io/projected/8de17215-f27c-4eab-a6f5-0ad924367b11-kube-api-access-4zs8m\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.512326 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvkh6\" (UniqueName: \"kubernetes.io/projected/9152f576-abed-4cb6-97ee-3a4efe2ba97f-kube-api-access-rvkh6\") pod \"9152f576-abed-4cb6-97ee-3a4efe2ba97f\" (UID: \"9152f576-abed-4cb6-97ee-3a4efe2ba97f\") " Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.527955 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9152f576-abed-4cb6-97ee-3a4efe2ba97f-kube-api-access-rvkh6" (OuterVolumeSpecName: "kube-api-access-rvkh6") pod "9152f576-abed-4cb6-97ee-3a4efe2ba97f" (UID: "9152f576-abed-4cb6-97ee-3a4efe2ba97f"). InnerVolumeSpecName "kube-api-access-rvkh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:58 crc kubenswrapper[4809]: I0930 00:30:58.615184 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvkh6\" (UniqueName: \"kubernetes.io/projected/9152f576-abed-4cb6-97ee-3a4efe2ba97f-kube-api-access-rvkh6\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.101756 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.101804 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.135379 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.215408 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-pkq57" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.215405 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pkq57" event={"ID":"9152f576-abed-4cb6-97ee-3a4efe2ba97f","Type":"ContainerDied","Data":"c52c88a342f3595466fa8fc1885fa469f64ae7745e2082103befffa0fed24367"} Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.215722 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c52c88a342f3595466fa8fc1885fa469f64ae7745e2082103befffa0fed24367" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.216997 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-96wlx" event={"ID":"8de17215-f27c-4eab-a6f5-0ad924367b11","Type":"ContainerDied","Data":"085fdc58d0580985a3d722c692c8172bd2a10823e0bdf9c2409da3e4b27946aa"} Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.217054 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="085fdc58d0580985a3d722c692c8172bd2a10823e0bdf9c2409da3e4b27946aa" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.217329 4809 scope.go:117] "RemoveContainer" containerID="1beb5e23176fd0bf97f216ee81ebd3a771c6ab6d654f17030bbea4ced6dd2d55" Sep 30 00:30:59 crc kubenswrapper[4809]: E0930 00:30:59.217606 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-84cd7bbbc9-7cs5m_openstack(37e852d4-1ac6-4168-b9c5-1a3cab13a676)\"" pod="openstack/heat-api-84cd7bbbc9-7cs5m" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.217751 4809 scope.go:117] "RemoveContainer" containerID="c8125d5b6d309ac0e059d2630cdc8fa69c5abc118dd223d500ae49248184444b" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.217977 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-96wlx" Sep 30 00:30:59 crc kubenswrapper[4809]: E0930 00:30:59.218016 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-7cc584f69d-tt2h7_openstack(9bc06dad-a168-4690-ab0d-5c14b05ef072)\"" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" Sep 30 00:30:59 crc kubenswrapper[4809]: I0930 00:30:59.445354 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:00 crc kubenswrapper[4809]: I0930 00:31:00.229916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerStarted","Data":"be1eee3226531d797540e824ac57441ab08b509dd72a3fe9abe3a91d0044629e"} Sep 30 00:31:00 crc kubenswrapper[4809]: I0930 00:31:00.231850 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965477a4-d575-4c80-826b-5ac22f3bfee3","Type":"ContainerStarted","Data":"099bcea2f751843339247ecc7bffac4bd41b195199040891d45045071d8f2eae"} Sep 30 00:31:00 crc kubenswrapper[4809]: I0930 00:31:00.231875 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"965477a4-d575-4c80-826b-5ac22f3bfee3","Type":"ContainerStarted","Data":"50343797c2ec5e871d53affca23d27ff4eaae79e0bafbc732f7be7976e44d8d0"} Sep 30 00:31:00 crc kubenswrapper[4809]: I0930 00:31:00.232061 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 00:31:00 crc kubenswrapper[4809]: I0930 00:31:00.259810 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.2597913290000005 podStartE2EDuration="4.259791329s" podCreationTimestamp="2025-09-30 00:30:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:31:00.252554141 +0000 UTC m=+1311.288803539" watchObservedRunningTime="2025-09-30 00:31:00.259791329 +0000 UTC m=+1311.296040737" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.283289 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerStarted","Data":"c363115972730a86a3f815220b44c70cc0fec8510724b8d5804fac3c3d5b5096"} Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.283649 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-central-agent" containerID="cri-o://75f909ea76fd2521468cb8373fe50fdfbeee4cd31af8f517d3e96e2f70a4ded0" gracePeriod=30 Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.283822 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.283882 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="proxy-httpd" containerID="cri-o://c363115972730a86a3f815220b44c70cc0fec8510724b8d5804fac3c3d5b5096" gracePeriod=30 Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.283978 4809 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/ceilometer-0" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="sg-core" containerID="cri-o://be1eee3226531d797540e824ac57441ab08b509dd72a3fe9abe3a91d0044629e" gracePeriod=30 Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.284012 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-notification-agent" containerID="cri-o://9ae4c4c6a62a9a2f10bc821ef242ba1125325d99df63b0be12288fe453692807" gracePeriod=30 Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.305186 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.943945495 podStartE2EDuration="9.305167154s" podCreationTimestamp="2025-09-30 00:30:53 +0000 UTC" firstStartedPulling="2025-09-30 00:30:55.54248984 +0000 UTC m=+1306.578739248" lastFinishedPulling="2025-09-30 00:31:00.903711499 +0000 UTC m=+1311.939960907" observedRunningTime="2025-09-30 00:31:02.303705184 +0000 UTC m=+1313.339954592" watchObservedRunningTime="2025-09-30 00:31:02.305167154 +0000 UTC m=+1313.341416572" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.567318 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.572457 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-76dd7988df-8d5vk" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.662270 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.723541 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.760714 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-84cd7bbbc9-7cs5m"] Sep 30 00:31:02 crc kubenswrapper[4809]: I0930 00:31:02.851263 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-7cc584f69d-tt2h7"] Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.325703 4809 generic.go:334] "Generic (PLEG): container finished" podID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerID="c363115972730a86a3f815220b44c70cc0fec8510724b8d5804fac3c3d5b5096" exitCode=0 Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.326093 4809 generic.go:334] "Generic (PLEG): container finished" podID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerID="be1eee3226531d797540e824ac57441ab08b509dd72a3fe9abe3a91d0044629e" exitCode=2 Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.326104 4809 generic.go:334] "Generic (PLEG): container finished" podID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerID="9ae4c4c6a62a9a2f10bc821ef242ba1125325d99df63b0be12288fe453692807" exitCode=0 Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.326112 4809 generic.go:334] "Generic (PLEG): container finished" podID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerID="75f909ea76fd2521468cb8373fe50fdfbeee4cd31af8f517d3e96e2f70a4ded0" exitCode=0 Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.325896 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerDied","Data":"c363115972730a86a3f815220b44c70cc0fec8510724b8d5804fac3c3d5b5096"} Sep 30 
00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.326206 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerDied","Data":"be1eee3226531d797540e824ac57441ab08b509dd72a3fe9abe3a91d0044629e"} Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.326247 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerDied","Data":"9ae4c4c6a62a9a2f10bc821ef242ba1125325d99df63b0be12288fe453692807"} Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.326261 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerDied","Data":"75f909ea76fd2521468cb8373fe50fdfbeee4cd31af8f517d3e96e2f70a4ded0"} Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.339523 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-84cd7bbbc9-7cs5m" event={"ID":"37e852d4-1ac6-4168-b9c5-1a3cab13a676","Type":"ContainerDied","Data":"523d2344771897b555d6eaa93827a499b9ad32e6951eb5fc3e4f108c7e4935f8"} Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.339686 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="523d2344771897b555d6eaa93827a499b9ad32e6951eb5fc3e4f108c7e4935f8" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.348279 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.443572 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-combined-ca-bundle\") pod \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.443838 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data-custom\") pod \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.443929 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhmcx\" (UniqueName: \"kubernetes.io/projected/37e852d4-1ac6-4168-b9c5-1a3cab13a676-kube-api-access-fhmcx\") pod \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.443976 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data\") pod \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\" (UID: \"37e852d4-1ac6-4168-b9c5-1a3cab13a676\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.461813 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "37e852d4-1ac6-4168-b9c5-1a3cab13a676" (UID: "37e852d4-1ac6-4168-b9c5-1a3cab13a676"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.485785 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37e852d4-1ac6-4168-b9c5-1a3cab13a676-kube-api-access-fhmcx" (OuterVolumeSpecName: "kube-api-access-fhmcx") pod "37e852d4-1ac6-4168-b9c5-1a3cab13a676" (UID: "37e852d4-1ac6-4168-b9c5-1a3cab13a676"). InnerVolumeSpecName "kube-api-access-fhmcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.537760 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37e852d4-1ac6-4168-b9c5-1a3cab13a676" (UID: "37e852d4-1ac6-4168-b9c5-1a3cab13a676"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.546004 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.546038 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.546047 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhmcx\" (UniqueName: \"kubernetes.io/projected/37e852d4-1ac6-4168-b9c5-1a3cab13a676-kube-api-access-fhmcx\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.560819 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data" (OuterVolumeSpecName: "config-data") pod "37e852d4-1ac6-4168-b9c5-1a3cab13a676" (UID: "37e852d4-1ac6-4168-b9c5-1a3cab13a676"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.595118 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.651011 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37e852d4-1ac6-4168-b9c5-1a3cab13a676-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.651241 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.660166 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.752814 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data-custom\") pod \"9bc06dad-a168-4690-ab0d-5c14b05ef072\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.752875 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wj8v\" (UniqueName: \"kubernetes.io/projected/30a49cca-a01d-45fb-92a0-6957fa3c61a8-kube-api-access-5wj8v\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.752957 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-combined-ca-bundle\") pod \"9bc06dad-a168-4690-ab0d-5c14b05ef072\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753029 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czlrc\" (UniqueName: \"kubernetes.io/projected/9bc06dad-a168-4690-ab0d-5c14b05ef072-kube-api-access-czlrc\") pod \"9bc06dad-a168-4690-ab0d-5c14b05ef072\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753056 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data\") pod \"9bc06dad-a168-4690-ab0d-5c14b05ef072\" (UID: \"9bc06dad-a168-4690-ab0d-5c14b05ef072\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753071 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-sg-core-conf-yaml\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753095 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-config-data\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753137 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-run-httpd\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753161 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-scripts\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753210 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-log-httpd\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: 
\"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.753262 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-combined-ca-bundle\") pod \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\" (UID: \"30a49cca-a01d-45fb-92a0-6957fa3c61a8\") " Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.759833 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.762153 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bc06dad-a168-4690-ab0d-5c14b05ef072-kube-api-access-czlrc" (OuterVolumeSpecName: "kube-api-access-czlrc") pod "9bc06dad-a168-4690-ab0d-5c14b05ef072" (UID: "9bc06dad-a168-4690-ab0d-5c14b05ef072"). InnerVolumeSpecName "kube-api-access-czlrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.771436 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30a49cca-a01d-45fb-92a0-6957fa3c61a8-kube-api-access-5wj8v" (OuterVolumeSpecName: "kube-api-access-5wj8v") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "kube-api-access-5wj8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.773028 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.773372 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-scripts" (OuterVolumeSpecName: "scripts") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.788064 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9bc06dad-a168-4690-ab0d-5c14b05ef072" (UID: "9bc06dad-a168-4690-ab0d-5c14b05ef072"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.797871 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bc06dad-a168-4690-ab0d-5c14b05ef072" (UID: "9bc06dad-a168-4690-ab0d-5c14b05ef072"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.802907 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.837811 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data" (OuterVolumeSpecName: "config-data") pod "9bc06dad-a168-4690-ab0d-5c14b05ef072" (UID: "9bc06dad-a168-4690-ab0d-5c14b05ef072"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.855573 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856881 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856920 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wj8v\" (UniqueName: \"kubernetes.io/projected/30a49cca-a01d-45fb-92a0-6957fa3c61a8-kube-api-access-5wj8v\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856937 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856947 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czlrc\" (UniqueName: \"kubernetes.io/projected/9bc06dad-a168-4690-ab0d-5c14b05ef072-kube-api-access-czlrc\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856958 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bc06dad-a168-4690-ab0d-5c14b05ef072-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856968 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856978 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.856987 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc 
kubenswrapper[4809]: I0930 00:31:03.856997 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30a49cca-a01d-45fb-92a0-6957fa3c61a8-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.857007 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.885356 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-config-data" (OuterVolumeSpecName: "config-data") pod "30a49cca-a01d-45fb-92a0-6957fa3c61a8" (UID: "30a49cca-a01d-45fb-92a0-6957fa3c61a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:03 crc kubenswrapper[4809]: I0930 00:31:03.958898 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a49cca-a01d-45fb-92a0-6957fa3c61a8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.352470 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30a49cca-a01d-45fb-92a0-6957fa3c61a8","Type":"ContainerDied","Data":"5c9816cc90a035f0731ac2f290dd00d65f1938d0084f9f9dacb7d43f1946618a"} Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.352526 4809 scope.go:117] "RemoveContainer" containerID="c363115972730a86a3f815220b44c70cc0fec8510724b8d5804fac3c3d5b5096" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.352687 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.355961 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-84cd7bbbc9-7cs5m" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.356911 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.359753 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-7cc584f69d-tt2h7" event={"ID":"9bc06dad-a168-4690-ab0d-5c14b05ef072","Type":"ContainerDied","Data":"25e809d98276d7d0adf98b4dff83740e552d2d40912ee8088797b659c1ff197d"} Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.395803 4809 scope.go:117] "RemoveContainer" containerID="be1eee3226531d797540e824ac57441ab08b509dd72a3fe9abe3a91d0044629e" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.399117 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-84cd7bbbc9-7cs5m"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.410124 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-84cd7bbbc9-7cs5m"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.423262 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-7cc584f69d-tt2h7"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.452324 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-7cc584f69d-tt2h7"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.463698 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.483961 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.497858 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498241 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerName="heat-api" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498252 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerName="heat-api" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498266 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-notification-agent" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498272 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-notification-agent" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498298 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-central-agent" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498305 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-central-agent" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498315 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8de17215-f27c-4eab-a6f5-0ad924367b11" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498322 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8de17215-f27c-4eab-a6f5-0ad924367b11" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498331 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97460645-6cce-4b0f-b2a2-d80caee414cd" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 
00:31:04.498338 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="97460645-6cce-4b0f-b2a2-d80caee414cd" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498349 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9152f576-abed-4cb6-97ee-3a4efe2ba97f" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498355 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9152f576-abed-4cb6-97ee-3a4efe2ba97f" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498367 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="proxy-httpd" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498374 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="proxy-httpd" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498385 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="sg-core" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498391 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="sg-core" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498404 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerName="heat-cfnapi" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498412 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerName="heat-cfnapi" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498426 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerName="heat-cfnapi" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498432 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerName="heat-cfnapi" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498634 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerName="heat-api" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498661 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="97460645-6cce-4b0f-b2a2-d80caee414cd" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498668 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerName="heat-cfnapi" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498677 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-notification-agent" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498689 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="proxy-httpd" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498701 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerName="heat-api" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498715 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="8de17215-f27c-4eab-a6f5-0ad924367b11" containerName="mariadb-database-create" Sep 30 
00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498730 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="ceilometer-central-agent" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498738 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" containerName="sg-core" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498746 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9152f576-abed-4cb6-97ee-3a4efe2ba97f" containerName="mariadb-database-create" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498755 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" containerName="heat-cfnapi" Sep 30 00:31:04 crc kubenswrapper[4809]: E0930 00:31:04.498949 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerName="heat-api" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.498957 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" containerName="heat-api" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.500605 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.500699 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.545925 4809 scope.go:117] "RemoveContainer" containerID="9ae4c4c6a62a9a2f10bc821ef242ba1125325d99df63b0be12288fe453692807" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.552731 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.552931 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.569929 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-scripts\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.569992 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-config-data\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.570085 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-run-httpd\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.570198 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.570244 
4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.570274 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-log-httpd\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.570306 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm88l\" (UniqueName: \"kubernetes.io/projected/9caa3de3-3003-482b-832d-c2a1207cd2e7-kube-api-access-hm88l\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.570879 4809 scope.go:117] "RemoveContainer" containerID="75f909ea76fd2521468cb8373fe50fdfbeee4cd31af8f517d3e96e2f70a4ded0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.590553 4809 scope.go:117] "RemoveContainer" containerID="c8125d5b6d309ac0e059d2630cdc8fa69c5abc118dd223d500ae49248184444b" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.671961 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672270 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672373 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-log-httpd\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672450 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm88l\" (UniqueName: \"kubernetes.io/projected/9caa3de3-3003-482b-832d-c2a1207cd2e7-kube-api-access-hm88l\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672547 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-scripts\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672656 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-config-data\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " 
pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672839 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-run-httpd\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.672927 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-log-httpd\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.673290 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-run-httpd\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.675794 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.676303 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-scripts\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.677067 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-config-data\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.700167 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.715103 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm88l\" (UniqueName: \"kubernetes.io/projected/9caa3de3-3003-482b-832d-c2a1207cd2e7-kube-api-access-hm88l\") pod \"ceilometer-0\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " pod="openstack/ceilometer-0" Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.763979 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:04 crc kubenswrapper[4809]: I0930 00:31:04.764706 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:05 crc kubenswrapper[4809]: W0930 00:31:05.230238 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9caa3de3_3003_482b_832d_c2a1207cd2e7.slice/crio-397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee WatchSource:0}: Error finding container 397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee: Status 404 returned error can't find the container with id 397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee Sep 30 00:31:05 crc kubenswrapper[4809]: I0930 00:31:05.233058 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:05 crc kubenswrapper[4809]: I0930 00:31:05.366610 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerStarted","Data":"397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee"} Sep 30 00:31:05 crc kubenswrapper[4809]: I0930 00:31:05.702260 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30a49cca-a01d-45fb-92a0-6957fa3c61a8" path="/var/lib/kubelet/pods/30a49cca-a01d-45fb-92a0-6957fa3c61a8/volumes" Sep 30 00:31:05 crc kubenswrapper[4809]: I0930 00:31:05.702990 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37e852d4-1ac6-4168-b9c5-1a3cab13a676" path="/var/lib/kubelet/pods/37e852d4-1ac6-4168-b9c5-1a3cab13a676/volumes" Sep 30 00:31:05 crc kubenswrapper[4809]: I0930 00:31:05.703508 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bc06dad-a168-4690-ab0d-5c14b05ef072" path="/var/lib/kubelet/pods/9bc06dad-a168-4690-ab0d-5c14b05ef072/volumes" Sep 30 00:31:07 crc kubenswrapper[4809]: I0930 00:31:07.394207 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerStarted","Data":"31b0820affe756fc69d15e562cf75e64a4573de59db2f9de9e4bd2c105e05e90"} Sep 30 00:31:08 crc kubenswrapper[4809]: I0930 00:31:08.405185 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerStarted","Data":"8b464112038f1ab3f1ccd2a4c4267fa1c4ee86a4354c7022b82deb88788ac059"} Sep 30 00:31:08 crc kubenswrapper[4809]: I0930 00:31:08.865261 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 00:31:09 crc kubenswrapper[4809]: I0930 00:31:09.104888 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:31:09 crc kubenswrapper[4809]: I0930 00:31:09.154843 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-6c8878cd54-q8bdj"] Sep 30 00:31:09 crc kubenswrapper[4809]: I0930 00:31:09.155044 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-6c8878cd54-q8bdj" podUID="30ff7679-5c63-4bb2-a427-90410586d459" containerName="heat-engine" containerID="cri-o://5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" gracePeriod=60 Sep 30 00:31:09 crc kubenswrapper[4809]: I0930 00:31:09.416631 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerStarted","Data":"2c1eb314454e5cd9cbd284b2b4e8aa68056a74a16282adba6916ca0b91942d34"} Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.446342 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerStarted","Data":"e65fb4d92202a9bb5d1d214b51244bea6063640ab61317de7b8a72e3db21be73"} Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.448013 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.446804 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-notification-agent" containerID="cri-o://8b464112038f1ab3f1ccd2a4c4267fa1c4ee86a4354c7022b82deb88788ac059" gracePeriod=30 Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.446848 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="sg-core" containerID="cri-o://2c1eb314454e5cd9cbd284b2b4e8aa68056a74a16282adba6916ca0b91942d34" gracePeriod=30 Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.446848 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="proxy-httpd" containerID="cri-o://e65fb4d92202a9bb5d1d214b51244bea6063640ab61317de7b8a72e3db21be73" gracePeriod=30 Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.446542 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-central-agent" containerID="cri-o://31b0820affe756fc69d15e562cf75e64a4573de59db2f9de9e4bd2c105e05e90" gracePeriod=30 Sep 30 00:31:10 crc kubenswrapper[4809]: I0930 00:31:10.479368 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.689749959 podStartE2EDuration="6.479344846s" podCreationTimestamp="2025-09-30 00:31:04 +0000 UTC" firstStartedPulling="2025-09-30 00:31:05.23285364 +0000 UTC m=+1316.269103048" lastFinishedPulling="2025-09-30 00:31:10.022448527 +0000 UTC m=+1321.058697935" observedRunningTime="2025-09-30 00:31:10.465094898 +0000 UTC m=+1321.501344326" watchObservedRunningTime="2025-09-30 00:31:10.479344846 +0000 UTC m=+1321.515594254" Sep 30 00:31:10 crc kubenswrapper[4809]: E0930 00:31:10.991153 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9caa3de3_3003_482b_832d_c2a1207cd2e7.slice/crio-8b464112038f1ab3f1ccd2a4c4267fa1c4ee86a4354c7022b82deb88788ac059.scope\": RecentStats: unable to find data in memory cache]" Sep 30 00:31:11 crc kubenswrapper[4809]: E0930 00:31:11.152444 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:31:11 crc kubenswrapper[4809]: E0930 00:31:11.153946 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc 
error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:31:11 crc kubenswrapper[4809]: E0930 00:31:11.156412 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:31:11 crc kubenswrapper[4809]: E0930 00:31:11.156472 4809 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-6c8878cd54-q8bdj" podUID="30ff7679-5c63-4bb2-a427-90410586d459" containerName="heat-engine" Sep 30 00:31:11 crc kubenswrapper[4809]: I0930 00:31:11.474234 4809 generic.go:334] "Generic (PLEG): container finished" podID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerID="2c1eb314454e5cd9cbd284b2b4e8aa68056a74a16282adba6916ca0b91942d34" exitCode=2 Sep 30 00:31:11 crc kubenswrapper[4809]: I0930 00:31:11.474266 4809 generic.go:334] "Generic (PLEG): container finished" podID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerID="8b464112038f1ab3f1ccd2a4c4267fa1c4ee86a4354c7022b82deb88788ac059" exitCode=0 Sep 30 00:31:11 crc kubenswrapper[4809]: I0930 00:31:11.474308 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerDied","Data":"2c1eb314454e5cd9cbd284b2b4e8aa68056a74a16282adba6916ca0b91942d34"} Sep 30 00:31:11 crc kubenswrapper[4809]: I0930 00:31:11.474359 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerDied","Data":"8b464112038f1ab3f1ccd2a4c4267fa1c4ee86a4354c7022b82deb88788ac059"} Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.388164 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-4218-account-create-m5tjr"] Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.389730 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.411900 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.416175 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4218-account-create-m5tjr"] Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.550145 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dksvm\" (UniqueName: \"kubernetes.io/projected/7a7e33b7-b3ea-4290-9680-eacdad360a53-kube-api-access-dksvm\") pod \"nova-api-4218-account-create-m5tjr\" (UID: \"7a7e33b7-b3ea-4290-9680-eacdad360a53\") " pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.588878 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-3a35-account-create-r6tmr"] Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.590597 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.599034 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.618116 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3a35-account-create-r6tmr"] Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.657421 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j2pp\" (UniqueName: \"kubernetes.io/projected/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc-kube-api-access-8j2pp\") pod \"nova-cell0-3a35-account-create-r6tmr\" (UID: \"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc\") " pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.657504 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dksvm\" (UniqueName: \"kubernetes.io/projected/7a7e33b7-b3ea-4290-9680-eacdad360a53-kube-api-access-dksvm\") pod \"nova-api-4218-account-create-m5tjr\" (UID: \"7a7e33b7-b3ea-4290-9680-eacdad360a53\") " pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.697493 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dksvm\" (UniqueName: \"kubernetes.io/projected/7a7e33b7-b3ea-4290-9680-eacdad360a53-kube-api-access-dksvm\") pod \"nova-api-4218-account-create-m5tjr\" (UID: \"7a7e33b7-b3ea-4290-9680-eacdad360a53\") " pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.723329 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.768814 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j2pp\" (UniqueName: \"kubernetes.io/projected/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc-kube-api-access-8j2pp\") pod \"nova-cell0-3a35-account-create-r6tmr\" (UID: \"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc\") " pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.794374 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j2pp\" (UniqueName: \"kubernetes.io/projected/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc-kube-api-access-8j2pp\") pod \"nova-cell0-3a35-account-create-r6tmr\" (UID: \"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc\") " pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.797874 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-16f3-account-create-cklrw"] Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.799260 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.809939 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.832682 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-16f3-account-create-cklrw"] Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.872966 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lsmv\" (UniqueName: \"kubernetes.io/projected/df13f8fe-c553-41f3-bcd6-625d07a7d2de-kube-api-access-9lsmv\") pod \"nova-cell1-16f3-account-create-cklrw\" (UID: \"df13f8fe-c553-41f3-bcd6-625d07a7d2de\") " pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.914420 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:12 crc kubenswrapper[4809]: I0930 00:31:12.975360 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lsmv\" (UniqueName: \"kubernetes.io/projected/df13f8fe-c553-41f3-bcd6-625d07a7d2de-kube-api-access-9lsmv\") pod \"nova-cell1-16f3-account-create-cklrw\" (UID: \"df13f8fe-c553-41f3-bcd6-625d07a7d2de\") " pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.005742 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lsmv\" (UniqueName: \"kubernetes.io/projected/df13f8fe-c553-41f3-bcd6-625d07a7d2de-kube-api-access-9lsmv\") pod \"nova-cell1-16f3-account-create-cklrw\" (UID: \"df13f8fe-c553-41f3-bcd6-625d07a7d2de\") " pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.194033 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.276946 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4218-account-create-m5tjr"] Sep 30 00:31:13 crc kubenswrapper[4809]: W0930 00:31:13.291819 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a7e33b7_b3ea_4290_9680_eacdad360a53.slice/crio-35dba79cf9d4d8bb24470026d6010444d10737a5915bfea91d93bf6754557385 WatchSource:0}: Error finding container 35dba79cf9d4d8bb24470026d6010444d10737a5915bfea91d93bf6754557385: Status 404 returned error can't find the container with id 35dba79cf9d4d8bb24470026d6010444d10737a5915bfea91d93bf6754557385 Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.483302 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3a35-account-create-r6tmr"] Sep 30 00:31:13 crc kubenswrapper[4809]: W0930 00:31:13.486942 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea156bcc_d8c5_4cff_a3d7_cfa69e7b80dc.slice/crio-e87c29b4c73a9499bb969a436fbdb7505bf5f46d24c58c25a12f9376f0fc0571 WatchSource:0}: Error finding container e87c29b4c73a9499bb969a436fbdb7505bf5f46d24c58c25a12f9376f0fc0571: Status 404 returned error can't find the container with id e87c29b4c73a9499bb969a436fbdb7505bf5f46d24c58c25a12f9376f0fc0571 Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.533994 4809 generic.go:334] "Generic (PLEG): container finished" podID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerID="31b0820affe756fc69d15e562cf75e64a4573de59db2f9de9e4bd2c105e05e90" exitCode=0 Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.534072 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerDied","Data":"31b0820affe756fc69d15e562cf75e64a4573de59db2f9de9e4bd2c105e05e90"} Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.542222 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3a35-account-create-r6tmr" event={"ID":"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc","Type":"ContainerStarted","Data":"e87c29b4c73a9499bb969a436fbdb7505bf5f46d24c58c25a12f9376f0fc0571"} Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.557729 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4218-account-create-m5tjr" event={"ID":"7a7e33b7-b3ea-4290-9680-eacdad360a53","Type":"ContainerStarted","Data":"35dba79cf9d4d8bb24470026d6010444d10737a5915bfea91d93bf6754557385"} Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.572457 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-4218-account-create-m5tjr" podStartSLOduration=1.572435766 podStartE2EDuration="1.572435766s" podCreationTimestamp="2025-09-30 00:31:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:31:13.572299412 +0000 UTC m=+1324.608548820" watchObservedRunningTime="2025-09-30 00:31:13.572435766 +0000 UTC m=+1324.608685174" Sep 30 00:31:13 crc kubenswrapper[4809]: I0930 00:31:13.725277 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-16f3-account-create-cklrw"] Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.569469 4809 generic.go:334] "Generic (PLEG): 
container finished" podID="df13f8fe-c553-41f3-bcd6-625d07a7d2de" containerID="63a0486be55448e34ddfc2f5eb50502e755d08247b8c58d5cf8eeab0da4b6a56" exitCode=0 Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.569577 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-16f3-account-create-cklrw" event={"ID":"df13f8fe-c553-41f3-bcd6-625d07a7d2de","Type":"ContainerDied","Data":"63a0486be55448e34ddfc2f5eb50502e755d08247b8c58d5cf8eeab0da4b6a56"} Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.569913 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-16f3-account-create-cklrw" event={"ID":"df13f8fe-c553-41f3-bcd6-625d07a7d2de","Type":"ContainerStarted","Data":"c096e9986bffb5025c02c7e8bd120c4ed07a13a3e7cf3d63093ebb7a0ac54a71"} Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.571612 4809 generic.go:334] "Generic (PLEG): container finished" podID="7a7e33b7-b3ea-4290-9680-eacdad360a53" containerID="d11e618a1ce17a6be1fbebcb668b532c873bc5293ca1c2b63879b8044cf79faa" exitCode=0 Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.571660 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4218-account-create-m5tjr" event={"ID":"7a7e33b7-b3ea-4290-9680-eacdad360a53","Type":"ContainerDied","Data":"d11e618a1ce17a6be1fbebcb668b532c873bc5293ca1c2b63879b8044cf79faa"} Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.573450 4809 generic.go:334] "Generic (PLEG): container finished" podID="ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc" containerID="e08ad1e4e5b32ec57967d012dc1509136484554eafbcf91dce3380dbfa8d1bb3" exitCode=0 Sep 30 00:31:14 crc kubenswrapper[4809]: I0930 00:31:14.573480 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3a35-account-create-r6tmr" event={"ID":"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc","Type":"ContainerDied","Data":"e08ad1e4e5b32ec57967d012dc1509136484554eafbcf91dce3380dbfa8d1bb3"} Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.179576 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.353575 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j2pp\" (UniqueName: \"kubernetes.io/projected/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc-kube-api-access-8j2pp\") pod \"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc\" (UID: \"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc\") " Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.382937 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc-kube-api-access-8j2pp" (OuterVolumeSpecName: "kube-api-access-8j2pp") pod "ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc" (UID: "ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc"). InnerVolumeSpecName "kube-api-access-8j2pp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.458218 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j2pp\" (UniqueName: \"kubernetes.io/projected/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc-kube-api-access-8j2pp\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.514103 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.521632 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.606665 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3a35-account-create-r6tmr" event={"ID":"ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc","Type":"ContainerDied","Data":"e87c29b4c73a9499bb969a436fbdb7505bf5f46d24c58c25a12f9376f0fc0571"} Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.606706 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e87c29b4c73a9499bb969a436fbdb7505bf5f46d24c58c25a12f9376f0fc0571" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.606735 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3a35-account-create-r6tmr" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.614783 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-16f3-account-create-cklrw" event={"ID":"df13f8fe-c553-41f3-bcd6-625d07a7d2de","Type":"ContainerDied","Data":"c096e9986bffb5025c02c7e8bd120c4ed07a13a3e7cf3d63093ebb7a0ac54a71"} Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.614826 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c096e9986bffb5025c02c7e8bd120c4ed07a13a3e7cf3d63093ebb7a0ac54a71" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.614903 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-16f3-account-create-cklrw" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.621100 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4218-account-create-m5tjr" event={"ID":"7a7e33b7-b3ea-4290-9680-eacdad360a53","Type":"ContainerDied","Data":"35dba79cf9d4d8bb24470026d6010444d10737a5915bfea91d93bf6754557385"} Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.621135 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35dba79cf9d4d8bb24470026d6010444d10737a5915bfea91d93bf6754557385" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.621220 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4218-account-create-m5tjr" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.664502 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dksvm\" (UniqueName: \"kubernetes.io/projected/7a7e33b7-b3ea-4290-9680-eacdad360a53-kube-api-access-dksvm\") pod \"7a7e33b7-b3ea-4290-9680-eacdad360a53\" (UID: \"7a7e33b7-b3ea-4290-9680-eacdad360a53\") " Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.664659 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lsmv\" (UniqueName: \"kubernetes.io/projected/df13f8fe-c553-41f3-bcd6-625d07a7d2de-kube-api-access-9lsmv\") pod \"df13f8fe-c553-41f3-bcd6-625d07a7d2de\" (UID: \"df13f8fe-c553-41f3-bcd6-625d07a7d2de\") " Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.668983 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a7e33b7-b3ea-4290-9680-eacdad360a53-kube-api-access-dksvm" (OuterVolumeSpecName: "kube-api-access-dksvm") pod "7a7e33b7-b3ea-4290-9680-eacdad360a53" (UID: "7a7e33b7-b3ea-4290-9680-eacdad360a53"). InnerVolumeSpecName "kube-api-access-dksvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.671852 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df13f8fe-c553-41f3-bcd6-625d07a7d2de-kube-api-access-9lsmv" (OuterVolumeSpecName: "kube-api-access-9lsmv") pod "df13f8fe-c553-41f3-bcd6-625d07a7d2de" (UID: "df13f8fe-c553-41f3-bcd6-625d07a7d2de"). InnerVolumeSpecName "kube-api-access-9lsmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.767236 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dksvm\" (UniqueName: \"kubernetes.io/projected/7a7e33b7-b3ea-4290-9680-eacdad360a53-kube-api-access-dksvm\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:16 crc kubenswrapper[4809]: I0930 00:31:16.767278 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lsmv\" (UniqueName: \"kubernetes.io/projected/df13f8fe-c553-41f3-bcd6-625d07a7d2de-kube-api-access-9lsmv\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.923678 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9thmh"] Sep 30 00:31:17 crc kubenswrapper[4809]: E0930 00:31:17.924515 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.924533 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: E0930 00:31:17.924562 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a7e33b7-b3ea-4290-9680-eacdad360a53" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.924572 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a7e33b7-b3ea-4290-9680-eacdad360a53" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: E0930 00:31:17.924586 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df13f8fe-c553-41f3-bcd6-625d07a7d2de" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 
00:31:17.924593 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="df13f8fe-c553-41f3-bcd6-625d07a7d2de" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.924819 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="df13f8fe-c553-41f3-bcd6-625d07a7d2de" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.924848 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a7e33b7-b3ea-4290-9680-eacdad360a53" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.924858 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc" containerName="mariadb-account-create" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.925694 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.927299 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-q6p9b" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.929437 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.929439 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 00:31:17 crc kubenswrapper[4809]: I0930 00:31:17.933297 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9thmh"] Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.092695 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-config-data\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.092739 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpq4b\" (UniqueName: \"kubernetes.io/projected/f81f791a-b60f-4d43-b645-47308146aac2-kube-api-access-gpq4b\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.092795 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.093032 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-scripts\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.195509 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-config-data\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.195578 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpq4b\" (UniqueName: \"kubernetes.io/projected/f81f791a-b60f-4d43-b645-47308146aac2-kube-api-access-gpq4b\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.195667 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.195752 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-scripts\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.202482 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-config-data\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.202728 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.206510 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-scripts\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.212174 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpq4b\" (UniqueName: \"kubernetes.io/projected/f81f791a-b60f-4d43-b645-47308146aac2-kube-api-access-gpq4b\") pod \"nova-cell0-conductor-db-sync-9thmh\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.243196 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:18 crc kubenswrapper[4809]: I0930 00:31:18.754136 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9thmh"] Sep 30 00:31:19 crc kubenswrapper[4809]: I0930 00:31:19.688484 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9thmh" event={"ID":"f81f791a-b60f-4d43-b645-47308146aac2","Type":"ContainerStarted","Data":"b913024ba5acec33d78542afc0b690ce341b6591ba8e21e620d9a448ac16a817"} Sep 30 00:31:21 crc kubenswrapper[4809]: E0930 00:31:21.151475 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:31:21 crc kubenswrapper[4809]: E0930 00:31:21.153292 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:31:21 crc kubenswrapper[4809]: E0930 00:31:21.154673 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:31:21 crc kubenswrapper[4809]: E0930 00:31:21.154711 4809 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-6c8878cd54-q8bdj" podUID="30ff7679-5c63-4bb2-a427-90410586d459" containerName="heat-engine" Sep 30 00:31:21 crc kubenswrapper[4809]: I0930 00:31:21.742449 4809 generic.go:334] "Generic (PLEG): container finished" podID="30ff7679-5c63-4bb2-a427-90410586d459" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" exitCode=0 Sep 30 00:31:21 crc kubenswrapper[4809]: I0930 00:31:21.742726 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6c8878cd54-q8bdj" event={"ID":"30ff7679-5c63-4bb2-a427-90410586d459","Type":"ContainerDied","Data":"5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be"} Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.020128 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.208719 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-combined-ca-bundle\") pod \"30ff7679-5c63-4bb2-a427-90410586d459\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.208777 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szqtx\" (UniqueName: \"kubernetes.io/projected/30ff7679-5c63-4bb2-a427-90410586d459-kube-api-access-szqtx\") pod \"30ff7679-5c63-4bb2-a427-90410586d459\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.208937 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data-custom\") pod \"30ff7679-5c63-4bb2-a427-90410586d459\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.208965 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data\") pod \"30ff7679-5c63-4bb2-a427-90410586d459\" (UID: \"30ff7679-5c63-4bb2-a427-90410586d459\") " Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.222526 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ff7679-5c63-4bb2-a427-90410586d459-kube-api-access-szqtx" (OuterVolumeSpecName: "kube-api-access-szqtx") pod "30ff7679-5c63-4bb2-a427-90410586d459" (UID: "30ff7679-5c63-4bb2-a427-90410586d459"). InnerVolumeSpecName "kube-api-access-szqtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.250794 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "30ff7679-5c63-4bb2-a427-90410586d459" (UID: "30ff7679-5c63-4bb2-a427-90410586d459"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.275811 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30ff7679-5c63-4bb2-a427-90410586d459" (UID: "30ff7679-5c63-4bb2-a427-90410586d459"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.312270 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.312510 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.312616 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szqtx\" (UniqueName: \"kubernetes.io/projected/30ff7679-5c63-4bb2-a427-90410586d459-kube-api-access-szqtx\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.316838 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data" (OuterVolumeSpecName: "config-data") pod "30ff7679-5c63-4bb2-a427-90410586d459" (UID: "30ff7679-5c63-4bb2-a427-90410586d459"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.414906 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ff7679-5c63-4bb2-a427-90410586d459-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.755560 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6c8878cd54-q8bdj" event={"ID":"30ff7679-5c63-4bb2-a427-90410586d459","Type":"ContainerDied","Data":"3fb5a0dd8720e4491057fec1ecea10a9454198b8f6e120fd18c7142c25a3f38b"} Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.755623 4809 scope.go:117] "RemoveContainer" containerID="5b8cdaf86e9e37f54b743d5de635867846d5e5fc9ae82a631d226b8b99e4a7be" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.755774 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6c8878cd54-q8bdj" Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.800576 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-6c8878cd54-q8bdj"] Sep 30 00:31:22 crc kubenswrapper[4809]: I0930 00:31:22.808393 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-6c8878cd54-q8bdj"] Sep 30 00:31:23 crc kubenswrapper[4809]: I0930 00:31:23.704422 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ff7679-5c63-4bb2-a427-90410586d459" path="/var/lib/kubelet/pods/30ff7679-5c63-4bb2-a427-90410586d459/volumes" Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.324738 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.325622 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.325690 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.326513 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ea47a4425c6b80c1d2b34f75ad9b62ec24ab7c72e09d6e8962f4a70eaa824489"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.326582 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://ea47a4425c6b80c1d2b34f75ad9b62ec24ab7c72e09d6e8962f4a70eaa824489" gracePeriod=600 Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.790891 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="ea47a4425c6b80c1d2b34f75ad9b62ec24ab7c72e09d6e8962f4a70eaa824489" exitCode=0 Sep 30 00:31:25 crc kubenswrapper[4809]: I0930 00:31:25.790943 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"ea47a4425c6b80c1d2b34f75ad9b62ec24ab7c72e09d6e8962f4a70eaa824489"} Sep 30 00:31:26 crc kubenswrapper[4809]: I0930 00:31:26.845918 4809 scope.go:117] "RemoveContainer" containerID="980f1cfcbca928da130d5259c71b79d3b5762bb3b4baa2fd8b3f457e575da03a" Sep 30 00:31:27 crc kubenswrapper[4809]: I0930 00:31:27.816462 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9thmh" event={"ID":"f81f791a-b60f-4d43-b645-47308146aac2","Type":"ContainerStarted","Data":"df86103d3a237b581adb6ffa0c0fce1ea64ea0b5216c4e436af204ec24ead13b"} Sep 30 00:31:27 crc 
kubenswrapper[4809]: I0930 00:31:27.819695 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac"} Sep 30 00:31:27 crc kubenswrapper[4809]: I0930 00:31:27.845741 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-9thmh" podStartSLOduration=2.704238404 podStartE2EDuration="10.845718757s" podCreationTimestamp="2025-09-30 00:31:17 +0000 UTC" firstStartedPulling="2025-09-30 00:31:18.778692056 +0000 UTC m=+1329.814941464" lastFinishedPulling="2025-09-30 00:31:26.920172409 +0000 UTC m=+1337.956421817" observedRunningTime="2025-09-30 00:31:27.836296931 +0000 UTC m=+1338.872546339" watchObservedRunningTime="2025-09-30 00:31:27.845718757 +0000 UTC m=+1338.881968165" Sep 30 00:31:34 crc kubenswrapper[4809]: I0930 00:31:34.772167 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 00:31:39 crc kubenswrapper[4809]: I0930 00:31:39.948201 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9thmh" event={"ID":"f81f791a-b60f-4d43-b645-47308146aac2","Type":"ContainerDied","Data":"df86103d3a237b581adb6ffa0c0fce1ea64ea0b5216c4e436af204ec24ead13b"} Sep 30 00:31:39 crc kubenswrapper[4809]: I0930 00:31:39.948196 4809 generic.go:334] "Generic (PLEG): container finished" podID="f81f791a-b60f-4d43-b645-47308146aac2" containerID="df86103d3a237b581adb6ffa0c0fce1ea64ea0b5216c4e436af204ec24ead13b" exitCode=0 Sep 30 00:31:40 crc kubenswrapper[4809]: I0930 00:31:40.961104 4809 generic.go:334] "Generic (PLEG): container finished" podID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerID="e65fb4d92202a9bb5d1d214b51244bea6063640ab61317de7b8a72e3db21be73" exitCode=137 Sep 30 00:31:40 crc kubenswrapper[4809]: I0930 00:31:40.961171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerDied","Data":"e65fb4d92202a9bb5d1d214b51244bea6063640ab61317de7b8a72e3db21be73"} Sep 30 00:31:40 crc kubenswrapper[4809]: I0930 00:31:40.961438 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9caa3de3-3003-482b-832d-c2a1207cd2e7","Type":"ContainerDied","Data":"397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee"} Sep 30 00:31:40 crc kubenswrapper[4809]: I0930 00:31:40.961454 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.028584 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162194 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-run-httpd\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162544 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-scripts\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162587 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-log-httpd\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162744 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-config-data\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162819 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-combined-ca-bundle\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162839 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-sg-core-conf-yaml\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.162861 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hm88l\" (UniqueName: \"kubernetes.io/projected/9caa3de3-3003-482b-832d-c2a1207cd2e7-kube-api-access-hm88l\") pod \"9caa3de3-3003-482b-832d-c2a1207cd2e7\" (UID: \"9caa3de3-3003-482b-832d-c2a1207cd2e7\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.163215 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.163409 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.163904 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.169785 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-scripts" (OuterVolumeSpecName: "scripts") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.184102 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9caa3de3-3003-482b-832d-c2a1207cd2e7-kube-api-access-hm88l" (OuterVolumeSpecName: "kube-api-access-hm88l") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). InnerVolumeSpecName "kube-api-access-hm88l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.200973 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.254195 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.265753 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.265917 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.265977 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hm88l\" (UniqueName: \"kubernetes.io/projected/9caa3de3-3003-482b-832d-c2a1207cd2e7-kube-api-access-hm88l\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.266034 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.266091 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9caa3de3-3003-482b-832d-c2a1207cd2e7-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.279425 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-config-data" (OuterVolumeSpecName: "config-data") pod "9caa3de3-3003-482b-832d-c2a1207cd2e7" (UID: "9caa3de3-3003-482b-832d-c2a1207cd2e7"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.295197 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.367219 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpq4b\" (UniqueName: \"kubernetes.io/projected/f81f791a-b60f-4d43-b645-47308146aac2-kube-api-access-gpq4b\") pod \"f81f791a-b60f-4d43-b645-47308146aac2\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.367356 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-config-data\") pod \"f81f791a-b60f-4d43-b645-47308146aac2\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.367402 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-scripts\") pod \"f81f791a-b60f-4d43-b645-47308146aac2\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.367435 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-combined-ca-bundle\") pod \"f81f791a-b60f-4d43-b645-47308146aac2\" (UID: \"f81f791a-b60f-4d43-b645-47308146aac2\") " Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.367855 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9caa3de3-3003-482b-832d-c2a1207cd2e7-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.369919 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81f791a-b60f-4d43-b645-47308146aac2-kube-api-access-gpq4b" (OuterVolumeSpecName: "kube-api-access-gpq4b") pod "f81f791a-b60f-4d43-b645-47308146aac2" (UID: "f81f791a-b60f-4d43-b645-47308146aac2"). InnerVolumeSpecName "kube-api-access-gpq4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.370632 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-scripts" (OuterVolumeSpecName: "scripts") pod "f81f791a-b60f-4d43-b645-47308146aac2" (UID: "f81f791a-b60f-4d43-b645-47308146aac2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.391817 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-config-data" (OuterVolumeSpecName: "config-data") pod "f81f791a-b60f-4d43-b645-47308146aac2" (UID: "f81f791a-b60f-4d43-b645-47308146aac2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.393219 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f81f791a-b60f-4d43-b645-47308146aac2" (UID: "f81f791a-b60f-4d43-b645-47308146aac2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.469366 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.469400 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.469410 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpq4b\" (UniqueName: \"kubernetes.io/projected/f81f791a-b60f-4d43-b645-47308146aac2-kube-api-access-gpq4b\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.469421 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81f791a-b60f-4d43-b645-47308146aac2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:41 crc kubenswrapper[4809]: E0930 00:31:41.900066 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf81f791a_b60f_4d43_b645_47308146aac2.slice/crio-b913024ba5acec33d78542afc0b690ce341b6591ba8e21e620d9a448ac16a817\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9caa3de3_3003_482b_832d_c2a1207cd2e7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9caa3de3_3003_482b_832d_c2a1207cd2e7.slice/crio-397db3da0de02541ab65ebb374c84c4603d3539545dac018e2e40f6adb2e7dee\": RecentStats: unable to find data in memory cache]" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.974222 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.976127 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9thmh" Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.976503 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9thmh" event={"ID":"f81f791a-b60f-4d43-b645-47308146aac2","Type":"ContainerDied","Data":"b913024ba5acec33d78542afc0b690ce341b6591ba8e21e620d9a448ac16a817"} Sep 30 00:31:41 crc kubenswrapper[4809]: I0930 00:31:41.976530 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b913024ba5acec33d78542afc0b690ce341b6591ba8e21e620d9a448ac16a817" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.013499 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.050126 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.063798 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: E0930 00:31:42.064337 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="proxy-httpd" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064361 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="proxy-httpd" Sep 30 00:31:42 crc kubenswrapper[4809]: E0930 00:31:42.064375 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-notification-agent" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064382 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-notification-agent" Sep 30 00:31:42 crc kubenswrapper[4809]: E0930 00:31:42.064398 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-central-agent" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064405 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-central-agent" Sep 30 00:31:42 crc kubenswrapper[4809]: E0930 00:31:42.064424 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81f791a-b60f-4d43-b645-47308146aac2" containerName="nova-cell0-conductor-db-sync" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064431 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81f791a-b60f-4d43-b645-47308146aac2" containerName="nova-cell0-conductor-db-sync" Sep 30 00:31:42 crc kubenswrapper[4809]: E0930 00:31:42.064447 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ff7679-5c63-4bb2-a427-90410586d459" containerName="heat-engine" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064452 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ff7679-5c63-4bb2-a427-90410586d459" containerName="heat-engine" Sep 30 00:31:42 crc kubenswrapper[4809]: E0930 00:31:42.064467 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="sg-core" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064474 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="sg-core" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064737 4809 
memory_manager.go:354] "RemoveStaleState removing state" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-central-agent" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064751 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81f791a-b60f-4d43-b645-47308146aac2" containerName="nova-cell0-conductor-db-sync" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064761 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="sg-core" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064776 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ff7679-5c63-4bb2-a427-90410586d459" containerName="heat-engine" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064785 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="proxy-httpd" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.064794 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" containerName="ceilometer-notification-agent" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.066664 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.072547 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.074172 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.074561 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.127383 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.132553 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.136216 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.136537 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-q6p9b" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.139921 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181256 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbg92\" (UniqueName: \"kubernetes.io/projected/67594e0a-af59-4b84-8713-176fb13ca209-kube-api-access-rbg92\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181316 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-config-data\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181396 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-run-httpd\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181445 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-log-httpd\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181488 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181557 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.181661 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-scripts\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283337 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbg92\" (UniqueName: \"kubernetes.io/projected/67594e0a-af59-4b84-8713-176fb13ca209-kube-api-access-rbg92\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 
00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283405 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1137ef5b-0b42-40e7-8591-443e77c14e83-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283449 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-config-data\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283489 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-run-httpd\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283509 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-log-httpd\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283764 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283849 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1137ef5b-0b42-40e7-8591-443e77c14e83-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283894 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb5mg\" (UniqueName: \"kubernetes.io/projected/1137ef5b-0b42-40e7-8591-443e77c14e83-kube-api-access-jb5mg\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283981 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283988 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-run-httpd\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.283997 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-log-httpd\") pod \"ceilometer-0\" 
(UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.284256 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-scripts\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.288696 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.288927 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.289114 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-config-data\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.294301 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-scripts\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.301402 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbg92\" (UniqueName: \"kubernetes.io/projected/67594e0a-af59-4b84-8713-176fb13ca209-kube-api-access-rbg92\") pod \"ceilometer-0\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.386584 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1137ef5b-0b42-40e7-8591-443e77c14e83-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.386696 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1137ef5b-0b42-40e7-8591-443e77c14e83-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.386719 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb5mg\" (UniqueName: \"kubernetes.io/projected/1137ef5b-0b42-40e7-8591-443e77c14e83-kube-api-access-jb5mg\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.387033 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.393225 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1137ef5b-0b42-40e7-8591-443e77c14e83-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.393439 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1137ef5b-0b42-40e7-8591-443e77c14e83-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.402731 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb5mg\" (UniqueName: \"kubernetes.io/projected/1137ef5b-0b42-40e7-8591-443e77c14e83-kube-api-access-jb5mg\") pod \"nova-cell0-conductor-0\" (UID: \"1137ef5b-0b42-40e7-8591-443e77c14e83\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.464119 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.831168 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.958016 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:31:42 crc kubenswrapper[4809]: W0930 00:31:42.964522 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1137ef5b_0b42_40e7_8591_443e77c14e83.slice/crio-cb126017afaab81bad95bac3e4d523c03afe30325e677505704ecefef3fa61ca WatchSource:0}: Error finding container cb126017afaab81bad95bac3e4d523c03afe30325e677505704ecefef3fa61ca: Status 404 returned error can't find the container with id cb126017afaab81bad95bac3e4d523c03afe30325e677505704ecefef3fa61ca Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.987244 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerStarted","Data":"114ca9dbc79651a995a5c3fc03853e8e9a741c9d3fbba9289b77d4e2c171ef19"} Sep 30 00:31:42 crc kubenswrapper[4809]: I0930 00:31:42.989383 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1137ef5b-0b42-40e7-8591-443e77c14e83","Type":"ContainerStarted","Data":"cb126017afaab81bad95bac3e4d523c03afe30325e677505704ecefef3fa61ca"} Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.034113 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-572xr"] Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.036070 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-572xr" Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.046179 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-572xr"] Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.106871 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rgtq\" (UniqueName: \"kubernetes.io/projected/510b1218-ee5e-4ab3-a2a9-ade90df3466b-kube-api-access-9rgtq\") pod \"aodh-db-create-572xr\" (UID: \"510b1218-ee5e-4ab3-a2a9-ade90df3466b\") " pod="openstack/aodh-db-create-572xr" Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.211135 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rgtq\" (UniqueName: \"kubernetes.io/projected/510b1218-ee5e-4ab3-a2a9-ade90df3466b-kube-api-access-9rgtq\") pod \"aodh-db-create-572xr\" (UID: \"510b1218-ee5e-4ab3-a2a9-ade90df3466b\") " pod="openstack/aodh-db-create-572xr" Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.232932 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rgtq\" (UniqueName: \"kubernetes.io/projected/510b1218-ee5e-4ab3-a2a9-ade90df3466b-kube-api-access-9rgtq\") pod \"aodh-db-create-572xr\" (UID: \"510b1218-ee5e-4ab3-a2a9-ade90df3466b\") " pod="openstack/aodh-db-create-572xr" Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.465141 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-572xr" Sep 30 00:31:43 crc kubenswrapper[4809]: I0930 00:31:43.712441 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9caa3de3-3003-482b-832d-c2a1207cd2e7" path="/var/lib/kubelet/pods/9caa3de3-3003-482b-832d-c2a1207cd2e7/volumes" Sep 30 00:31:44 crc kubenswrapper[4809]: I0930 00:31:44.001116 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1137ef5b-0b42-40e7-8591-443e77c14e83","Type":"ContainerStarted","Data":"dd809cb1cd51a56fa0fea5c2b64154bf650b762377772536b25d00355e37d3d3"} Sep 30 00:31:44 crc kubenswrapper[4809]: I0930 00:31:44.001762 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:44 crc kubenswrapper[4809]: I0930 00:31:44.002479 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerStarted","Data":"3d51af67b94bee2044fa7c3079ea816b42f4cd4c3af3750d932aeeb6bf2eb63f"} Sep 30 00:31:44 crc kubenswrapper[4809]: I0930 00:31:44.019683 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.019663244 podStartE2EDuration="2.019663244s" podCreationTimestamp="2025-09-30 00:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:31:44.017195306 +0000 UTC m=+1355.053444724" watchObservedRunningTime="2025-09-30 00:31:44.019663244 +0000 UTC m=+1355.055912662" Sep 30 00:31:44 crc kubenswrapper[4809]: I0930 00:31:44.103331 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-572xr"] Sep 30 00:31:44 crc kubenswrapper[4809]: W0930 00:31:44.113241 4809 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod510b1218_ee5e_4ab3_a2a9_ade90df3466b.slice/crio-0433c21a64d4f81ebcfa91c09dce428d1ca3af6350ac85e612bc1ce91be3bf45 WatchSource:0}: Error finding container 0433c21a64d4f81ebcfa91c09dce428d1ca3af6350ac85e612bc1ce91be3bf45: Status 404 returned error can't find the container with id 0433c21a64d4f81ebcfa91c09dce428d1ca3af6350ac85e612bc1ce91be3bf45 Sep 30 00:31:45 crc kubenswrapper[4809]: I0930 00:31:45.013315 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerStarted","Data":"c427ea505545f20e87e6fbef1a08bd14b856926e89d54c5a639988436c04283d"} Sep 30 00:31:45 crc kubenswrapper[4809]: I0930 00:31:45.015495 4809 generic.go:334] "Generic (PLEG): container finished" podID="510b1218-ee5e-4ab3-a2a9-ade90df3466b" containerID="0bf2f4633116c7e689d365153c5d34e0c42e77184ad0f351762524f45a81588c" exitCode=0 Sep 30 00:31:45 crc kubenswrapper[4809]: I0930 00:31:45.015538 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-572xr" event={"ID":"510b1218-ee5e-4ab3-a2a9-ade90df3466b","Type":"ContainerDied","Data":"0bf2f4633116c7e689d365153c5d34e0c42e77184ad0f351762524f45a81588c"} Sep 30 00:31:45 crc kubenswrapper[4809]: I0930 00:31:45.015567 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-572xr" event={"ID":"510b1218-ee5e-4ab3-a2a9-ade90df3466b","Type":"ContainerStarted","Data":"0433c21a64d4f81ebcfa91c09dce428d1ca3af6350ac85e612bc1ce91be3bf45"} Sep 30 00:31:46 crc kubenswrapper[4809]: I0930 00:31:46.035747 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerStarted","Data":"90cb7f354da625769ff30571abba954bef8bbc57f04906ec8c34b453f33fa790"} Sep 30 00:31:46 crc kubenswrapper[4809]: I0930 00:31:46.702694 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-572xr" Sep 30 00:31:46 crc kubenswrapper[4809]: I0930 00:31:46.793417 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rgtq\" (UniqueName: \"kubernetes.io/projected/510b1218-ee5e-4ab3-a2a9-ade90df3466b-kube-api-access-9rgtq\") pod \"510b1218-ee5e-4ab3-a2a9-ade90df3466b\" (UID: \"510b1218-ee5e-4ab3-a2a9-ade90df3466b\") " Sep 30 00:31:46 crc kubenswrapper[4809]: I0930 00:31:46.801858 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/510b1218-ee5e-4ab3-a2a9-ade90df3466b-kube-api-access-9rgtq" (OuterVolumeSpecName: "kube-api-access-9rgtq") pod "510b1218-ee5e-4ab3-a2a9-ade90df3466b" (UID: "510b1218-ee5e-4ab3-a2a9-ade90df3466b"). InnerVolumeSpecName "kube-api-access-9rgtq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:46 crc kubenswrapper[4809]: I0930 00:31:46.896285 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rgtq\" (UniqueName: \"kubernetes.io/projected/510b1218-ee5e-4ab3-a2a9-ade90df3466b-kube-api-access-9rgtq\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:47 crc kubenswrapper[4809]: I0930 00:31:47.055759 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerStarted","Data":"839cdaf9460d9f2a2a49eb8eb85fc444cecb6412ae509e32f197411a35d4e5d0"} Sep 30 00:31:47 crc kubenswrapper[4809]: I0930 00:31:47.056092 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:31:47 crc kubenswrapper[4809]: I0930 00:31:47.057929 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-572xr" event={"ID":"510b1218-ee5e-4ab3-a2a9-ade90df3466b","Type":"ContainerDied","Data":"0433c21a64d4f81ebcfa91c09dce428d1ca3af6350ac85e612bc1ce91be3bf45"} Sep 30 00:31:47 crc kubenswrapper[4809]: I0930 00:31:47.057974 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0433c21a64d4f81ebcfa91c09dce428d1ca3af6350ac85e612bc1ce91be3bf45" Sep 30 00:31:47 crc kubenswrapper[4809]: I0930 00:31:47.057949 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-572xr" Sep 30 00:31:47 crc kubenswrapper[4809]: I0930 00:31:47.080613 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.3599568419999999 podStartE2EDuration="5.080596607s" podCreationTimestamp="2025-09-30 00:31:42 +0000 UTC" firstStartedPulling="2025-09-30 00:31:42.843339838 +0000 UTC m=+1353.879589246" lastFinishedPulling="2025-09-30 00:31:46.563979603 +0000 UTC m=+1357.600229011" observedRunningTime="2025-09-30 00:31:47.079402185 +0000 UTC m=+1358.115651583" watchObservedRunningTime="2025-09-30 00:31:47.080596607 +0000 UTC m=+1358.116846015" Sep 30 00:31:52 crc kubenswrapper[4809]: I0930 00:31:52.517706 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.028363 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-f56b-account-create-wfnjk"] Sep 30 00:31:53 crc kubenswrapper[4809]: E0930 00:31:53.028881 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1218-ee5e-4ab3-a2a9-ade90df3466b" containerName="mariadb-database-create" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.028905 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1218-ee5e-4ab3-a2a9-ade90df3466b" containerName="mariadb-database-create" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.029158 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1218-ee5e-4ab3-a2a9-ade90df3466b" containerName="mariadb-database-create" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.030044 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.031972 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.041506 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-f56b-account-create-wfnjk"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.055731 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzf25\" (UniqueName: \"kubernetes.io/projected/76dd894e-8637-49ec-953e-6e5aa1ee4e21-kube-api-access-dzf25\") pod \"aodh-f56b-account-create-wfnjk\" (UID: \"76dd894e-8637-49ec-953e-6e5aa1ee4e21\") " pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.148588 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-cvhd7"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.150345 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.152732 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.155285 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.156980 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzf25\" (UniqueName: \"kubernetes.io/projected/76dd894e-8637-49ec-953e-6e5aa1ee4e21-kube-api-access-dzf25\") pod \"aodh-f56b-account-create-wfnjk\" (UID: \"76dd894e-8637-49ec-953e-6e5aa1ee4e21\") " pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.159567 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cvhd7"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.191937 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzf25\" (UniqueName: \"kubernetes.io/projected/76dd894e-8637-49ec-953e-6e5aa1ee4e21-kube-api-access-dzf25\") pod \"aodh-f56b-account-create-wfnjk\" (UID: \"76dd894e-8637-49ec-953e-6e5aa1ee4e21\") " pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.258987 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-scripts\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.259078 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.259106 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk9p2\" (UniqueName: 
\"kubernetes.io/projected/14b3d277-31b4-4f12-b360-aeabe9420f33-kube-api-access-gk9p2\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.259150 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-config-data\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.297146 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.298686 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.300362 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.310940 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.356424 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.360742 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-config-data\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.360919 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-scripts\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.361020 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.361057 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk9p2\" (UniqueName: \"kubernetes.io/projected/14b3d277-31b4-4f12-b360-aeabe9420f33-kube-api-access-gk9p2\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.371355 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-config-data\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.375235 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.383950 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-scripts\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.385300 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk9p2\" (UniqueName: \"kubernetes.io/projected/14b3d277-31b4-4f12-b360-aeabe9420f33-kube-api-access-gk9p2\") pod \"nova-cell0-cell-mapping-cvhd7\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.423865 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.427278 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.432734 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.462945 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.463009 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-config-data\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.463144 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clklh\" (UniqueName: \"kubernetes.io/projected/067ed8b2-f745-47ad-b207-5ac441bc7510-kube-api-access-clklh\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.469777 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.498721 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.564870 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-config-data\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.564934 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clklh\" (UniqueName: \"kubernetes.io/projected/067ed8b2-f745-47ad-b207-5ac441bc7510-kube-api-access-clklh\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.564970 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.565014 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.565035 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c748de1-fa7d-41b8-9a1c-5fb693750edc-logs\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.565063 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8mk7\" (UniqueName: \"kubernetes.io/projected/4c748de1-fa7d-41b8-9a1c-5fb693750edc-kube-api-access-w8mk7\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.565084 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-config-data\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.572961 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.575462 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.576336 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.580944 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-config-data\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.595816 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.596565 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.615414 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clklh\" (UniqueName: \"kubernetes.io/projected/067ed8b2-f745-47ad-b207-5ac441bc7510-kube-api-access-clklh\") pod \"nova-scheduler-0\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.622099 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.623829 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.625292 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.632617 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.670480 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fbc4d444f-gjm9z"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.671953 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679377 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-config-data\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679437 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnggx\" (UniqueName: \"kubernetes.io/projected/4e24c1d3-e30e-4b8b-a034-490cdb943da8-kube-api-access-rnggx\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679470 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-swift-storage-0\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679601 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-svc\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679667 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-config\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679686 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679848 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.679879 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c748de1-fa7d-41b8-9a1c-5fb693750edc-logs\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.680016 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-config-data\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 
00:31:53.680055 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-nb\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.680122 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8mk7\" (UniqueName: \"kubernetes.io/projected/4c748de1-fa7d-41b8-9a1c-5fb693750edc-kube-api-access-w8mk7\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.680262 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.680396 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-sb\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.685493 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d59630e-01bf-4c23-b523-85e077e9d828-logs\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.685568 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.685659 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lhzw\" (UniqueName: \"kubernetes.io/projected/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-kube-api-access-5lhzw\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.685699 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psjxk\" (UniqueName: \"kubernetes.io/projected/8d59630e-01bf-4c23-b523-85e077e9d828-kube-api-access-psjxk\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.686191 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c748de1-fa7d-41b8-9a1c-5fb693750edc-logs\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.689329 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-config-data\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.716914 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.731315 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8mk7\" (UniqueName: \"kubernetes.io/projected/4c748de1-fa7d-41b8-9a1c-5fb693750edc-kube-api-access-w8mk7\") pod \"nova-metadata-0\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " pod="openstack/nova-metadata-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.803500 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-swift-storage-0\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.803831 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-svc\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.803880 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-config\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.803933 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.803978 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-config-data\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.803999 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-nb\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804024 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" 
Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804093 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-sb\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804115 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d59630e-01bf-4c23-b523-85e077e9d828-logs\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804283 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804350 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lhzw\" (UniqueName: \"kubernetes.io/projected/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-kube-api-access-5lhzw\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804379 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psjxk\" (UniqueName: \"kubernetes.io/projected/8d59630e-01bf-4c23-b523-85e077e9d828-kube-api-access-psjxk\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.804438 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnggx\" (UniqueName: \"kubernetes.io/projected/4e24c1d3-e30e-4b8b-a034-490cdb943da8-kube-api-access-rnggx\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.805918 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-swift-storage-0\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.809033 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-config\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.810280 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-svc\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.811369 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-sb\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.814743 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-nb\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.815915 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.815944 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fbc4d444f-gjm9z"] Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.816387 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d59630e-01bf-4c23-b523-85e077e9d828-logs\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.817063 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-config-data\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.820987 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.841430 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnggx\" (UniqueName: \"kubernetes.io/projected/4e24c1d3-e30e-4b8b-a034-490cdb943da8-kube-api-access-rnggx\") pod \"dnsmasq-dns-5fbc4d444f-gjm9z\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.843201 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lhzw\" (UniqueName: \"kubernetes.io/projected/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-kube-api-access-5lhzw\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.847708 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.847737 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.850719 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-psjxk\" (UniqueName: \"kubernetes.io/projected/8d59630e-01bf-4c23-b523-85e077e9d828-kube-api-access-psjxk\") pod \"nova-api-0\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " pod="openstack/nova-api-0" Sep 30 00:31:53 crc kubenswrapper[4809]: I0930 00:31:53.948768 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.017431 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.058072 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.108082 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.156351 4809 generic.go:334] "Generic (PLEG): container finished" podID="107d7545-41aa-471a-86e9-aa7e557f8faa" containerID="339ee0eb9d2cdb95d4c179b15ee534f418a9351af26be753336c842e5cf078a7" exitCode=137 Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.156389 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5884cff9-92mmk" event={"ID":"107d7545-41aa-471a-86e9-aa7e557f8faa","Type":"ContainerDied","Data":"339ee0eb9d2cdb95d4c179b15ee534f418a9351af26be753336c842e5cf078a7"} Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.426261 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-f56b-account-create-wfnjk"] Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.448521 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-cvhd7"] Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.505706 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zvndc"] Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.507067 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.509889 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.509997 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.523160 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zvndc"] Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.632577 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.641076 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7x8x\" (UniqueName: \"kubernetes.io/projected/e2f48302-4af1-49a1-948a-086de787a0c4-kube-api-access-d7x8x\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.641182 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-scripts\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.641258 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-config-data\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.641306 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.742709 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data\") pod \"107d7545-41aa-471a-86e9-aa7e557f8faa\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.742891 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-combined-ca-bundle\") pod \"107d7545-41aa-471a-86e9-aa7e557f8faa\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.742916 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data-custom\") pod \"107d7545-41aa-471a-86e9-aa7e557f8faa\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.742942 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfjzv\" (UniqueName: \"kubernetes.io/projected/107d7545-41aa-471a-86e9-aa7e557f8faa-kube-api-access-jfjzv\") pod \"107d7545-41aa-471a-86e9-aa7e557f8faa\" (UID: \"107d7545-41aa-471a-86e9-aa7e557f8faa\") " Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.743289 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7x8x\" (UniqueName: \"kubernetes.io/projected/e2f48302-4af1-49a1-948a-086de787a0c4-kube-api-access-d7x8x\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " 
pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.743480 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-scripts\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.743733 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-config-data\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.744035 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.757429 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "107d7545-41aa-471a-86e9-aa7e557f8faa" (UID: "107d7545-41aa-471a-86e9-aa7e557f8faa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.757821 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-scripts\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.763899 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107d7545-41aa-471a-86e9-aa7e557f8faa-kube-api-access-jfjzv" (OuterVolumeSpecName: "kube-api-access-jfjzv") pod "107d7545-41aa-471a-86e9-aa7e557f8faa" (UID: "107d7545-41aa-471a-86e9-aa7e557f8faa"). InnerVolumeSpecName "kube-api-access-jfjzv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.765880 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.766347 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-config-data\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: W0930 00:31:54.766431 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod067ed8b2_f745_47ad_b207_5ac441bc7510.slice/crio-10642a01c54e1b702bd173441f05bc9ffa1c025a6730740dc9fe2a16e555566a WatchSource:0}: Error finding container 10642a01c54e1b702bd173441f05bc9ffa1c025a6730740dc9fe2a16e555566a: Status 404 returned error can't find the container with id 10642a01c54e1b702bd173441f05bc9ffa1c025a6730740dc9fe2a16e555566a Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.769965 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7x8x\" (UniqueName: \"kubernetes.io/projected/e2f48302-4af1-49a1-948a-086de787a0c4-kube-api-access-d7x8x\") pod \"nova-cell1-conductor-db-sync-zvndc\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.777843 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.823746 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "107d7545-41aa-471a-86e9-aa7e557f8faa" (UID: "107d7545-41aa-471a-86e9-aa7e557f8faa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.845819 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.845840 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.845850 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfjzv\" (UniqueName: \"kubernetes.io/projected/107d7545-41aa-471a-86e9-aa7e557f8faa-kube-api-access-jfjzv\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.849533 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data" (OuterVolumeSpecName: "config-data") pod "107d7545-41aa-471a-86e9-aa7e557f8faa" (UID: "107d7545-41aa-471a-86e9-aa7e557f8faa"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.898773 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:31:54 crc kubenswrapper[4809]: I0930 00:31:54.947829 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/107d7545-41aa-471a-86e9-aa7e557f8faa-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.092106 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.160018 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.167974 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.181987 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"067ed8b2-f745-47ad-b207-5ac441bc7510","Type":"ContainerStarted","Data":"10642a01c54e1b702bd173441f05bc9ffa1c025a6730740dc9fe2a16e555566a"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.186500 4809 generic.go:334] "Generic (PLEG): container finished" podID="76dd894e-8637-49ec-953e-6e5aa1ee4e21" containerID="2f7f82b0982ed33c4181afb150f83f258a6e4fb448e08836f6b12bf5785d8984" exitCode=0 Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.186581 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-f56b-account-create-wfnjk" event={"ID":"76dd894e-8637-49ec-953e-6e5aa1ee4e21","Type":"ContainerDied","Data":"2f7f82b0982ed33c4181afb150f83f258a6e4fb448e08836f6b12bf5785d8984"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.186650 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-f56b-account-create-wfnjk" event={"ID":"76dd894e-8637-49ec-953e-6e5aa1ee4e21","Type":"ContainerStarted","Data":"b1d8b60b07473e49f3be6068af68d42ebffbc713fbac33effd863b6c65c60fa3"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.189662 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c5884cff9-92mmk" Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.189651 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5884cff9-92mmk" event={"ID":"107d7545-41aa-471a-86e9-aa7e557f8faa","Type":"ContainerDied","Data":"323501b65d68f1c3832687f1fe9ac1445c14e0b4367cac6963202e0015d93214"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.189865 4809 scope.go:117] "RemoveContainer" containerID="339ee0eb9d2cdb95d4c179b15ee534f418a9351af26be753336c842e5cf078a7" Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.208177 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c748de1-fa7d-41b8-9a1c-5fb693750edc","Type":"ContainerStarted","Data":"3007833ac45cc1ab4a84f4ebedad54360e2e35b32b7efe2d784c00e5bbd79e9b"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.218150 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fbc4d444f-gjm9z"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.231896 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cvhd7" event={"ID":"14b3d277-31b4-4f12-b360-aeabe9420f33","Type":"ContainerStarted","Data":"242d241e4e0d2e4694ea04f4fb50d749755a51ed502d97869e24c0b2218f2fee"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.231946 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cvhd7" event={"ID":"14b3d277-31b4-4f12-b360-aeabe9420f33","Type":"ContainerStarted","Data":"f5aa4d20478facf2121187b59b487c7d296293529b510859c6f62d4f0542fa1c"} Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.272078 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-cvhd7" podStartSLOduration=2.2720536989999998 podStartE2EDuration="2.272053699s" podCreationTimestamp="2025-09-30 00:31:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:31:55.250488 +0000 UTC m=+1366.286737408" watchObservedRunningTime="2025-09-30 00:31:55.272053699 +0000 UTC m=+1366.308303107" Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.385045 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-c5884cff9-92mmk"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.399686 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-c5884cff9-92mmk"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.506361 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zvndc"] Sep 30 00:31:55 crc kubenswrapper[4809]: I0930 00:31:55.709367 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107d7545-41aa-471a-86e9-aa7e557f8faa" path="/var/lib/kubelet/pods/107d7545-41aa-471a-86e9-aa7e557f8faa/volumes" Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.243771 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b","Type":"ContainerStarted","Data":"903b36ce046a467d1a45673ffed5f0a2221afcd000dab36edaae23233233c358"} Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.246394 4809 generic.go:334] "Generic (PLEG): container finished" podID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerID="ba2a65bfe9dc75d84411d6997d8d45384b0913ded84b953c87ef7ec18c072b7a" exitCode=0 Sep 30 00:31:56 crc 
kubenswrapper[4809]: I0930 00:31:56.246439 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" event={"ID":"4e24c1d3-e30e-4b8b-a034-490cdb943da8","Type":"ContainerDied","Data":"ba2a65bfe9dc75d84411d6997d8d45384b0913ded84b953c87ef7ec18c072b7a"} Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.246464 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" event={"ID":"4e24c1d3-e30e-4b8b-a034-490cdb943da8","Type":"ContainerStarted","Data":"21b0e391d10492ac591a57b15f8b6148899a79f0bb7b107aa3163bbf5995e866"} Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.248967 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d59630e-01bf-4c23-b523-85e077e9d828","Type":"ContainerStarted","Data":"47a7f5daf82fc43fc8a2e04c412a71218cb1e6496544d8798648595a0defb144"} Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.262295 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zvndc" event={"ID":"e2f48302-4af1-49a1-948a-086de787a0c4","Type":"ContainerStarted","Data":"eab730e58276147ed70d915f0c84500f12b6dca67d97368f0c7b708c7394f3c8"} Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.262597 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zvndc" event={"ID":"e2f48302-4af1-49a1-948a-086de787a0c4","Type":"ContainerStarted","Data":"e0c05eb9b06441d036f4e709ba3febfc19f6fef77b95d6ab209e2a8338c1bd72"} Sep 30 00:31:56 crc kubenswrapper[4809]: I0930 00:31:56.344439 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-zvndc" podStartSLOduration=2.344417854 podStartE2EDuration="2.344417854s" podCreationTimestamp="2025-09-30 00:31:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:31:56.299880297 +0000 UTC m=+1367.336129705" watchObservedRunningTime="2025-09-30 00:31:56.344417854 +0000 UTC m=+1367.380667262" Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.088970 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.111266 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzf25\" (UniqueName: \"kubernetes.io/projected/76dd894e-8637-49ec-953e-6e5aa1ee4e21-kube-api-access-dzf25\") pod \"76dd894e-8637-49ec-953e-6e5aa1ee4e21\" (UID: \"76dd894e-8637-49ec-953e-6e5aa1ee4e21\") " Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.133991 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76dd894e-8637-49ec-953e-6e5aa1ee4e21-kube-api-access-dzf25" (OuterVolumeSpecName: "kube-api-access-dzf25") pod "76dd894e-8637-49ec-953e-6e5aa1ee4e21" (UID: "76dd894e-8637-49ec-953e-6e5aa1ee4e21"). InnerVolumeSpecName "kube-api-access-dzf25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.214007 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzf25\" (UniqueName: \"kubernetes.io/projected/76dd894e-8637-49ec-953e-6e5aa1ee4e21-kube-api-access-dzf25\") on node \"crc\" DevicePath \"\"" Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.242725 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.252451 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.337560 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-f56b-account-create-wfnjk" Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.337805 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-f56b-account-create-wfnjk" event={"ID":"76dd894e-8637-49ec-953e-6e5aa1ee4e21","Type":"ContainerDied","Data":"b1d8b60b07473e49f3be6068af68d42ebffbc713fbac33effd863b6c65c60fa3"} Sep 30 00:31:57 crc kubenswrapper[4809]: I0930 00:31:57.337853 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1d8b60b07473e49f3be6068af68d42ebffbc713fbac33effd863b6c65c60fa3" Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.358116 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b","Type":"ContainerStarted","Data":"2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.358417 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268" gracePeriod=30 Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.361564 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" event={"ID":"4e24c1d3-e30e-4b8b-a034-490cdb943da8","Type":"ContainerStarted","Data":"34b1449cc7730f0567694ae66e51bb7b87b7f308c156ff37d189670b0131514d"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.361886 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.365769 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d59630e-01bf-4c23-b523-85e077e9d828","Type":"ContainerStarted","Data":"953f6ece01dae039e25ba7681a76b7c011daf87cedeaf43179abe74dc5f2912f"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.365810 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d59630e-01bf-4c23-b523-85e077e9d828","Type":"ContainerStarted","Data":"064e18ec859c753d0fe1315b1dded864bb4f2e2da08807c46c647370168035b2"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.367505 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c748de1-fa7d-41b8-9a1c-5fb693750edc","Type":"ContainerStarted","Data":"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.367529 4809 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/nova-metadata-0" event={"ID":"4c748de1-fa7d-41b8-9a1c-5fb693750edc","Type":"ContainerStarted","Data":"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.367654 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-log" containerID="cri-o://a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e" gracePeriod=30 Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.367890 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-metadata" containerID="cri-o://352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d" gracePeriod=30 Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.370801 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"067ed8b2-f745-47ad-b207-5ac441bc7510","Type":"ContainerStarted","Data":"f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a"} Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.390134 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.129845398 podStartE2EDuration="6.390110924s" podCreationTimestamp="2025-09-30 00:31:53 +0000 UTC" firstStartedPulling="2025-09-30 00:31:55.20696877 +0000 UTC m=+1366.243218178" lastFinishedPulling="2025-09-30 00:31:58.467234286 +0000 UTC m=+1369.503483704" observedRunningTime="2025-09-30 00:31:59.377083258 +0000 UTC m=+1370.413332666" watchObservedRunningTime="2025-09-30 00:31:59.390110924 +0000 UTC m=+1370.426360322" Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.409051 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.159891899 podStartE2EDuration="6.409032301s" podCreationTimestamp="2025-09-30 00:31:53 +0000 UTC" firstStartedPulling="2025-09-30 00:31:55.219877803 +0000 UTC m=+1366.256127211" lastFinishedPulling="2025-09-30 00:31:58.469018165 +0000 UTC m=+1369.505267613" observedRunningTime="2025-09-30 00:31:59.403895371 +0000 UTC m=+1370.440144779" watchObservedRunningTime="2025-09-30 00:31:59.409032301 +0000 UTC m=+1370.445281709" Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.432419 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" podStartSLOduration=6.4323997 podStartE2EDuration="6.4323997s" podCreationTimestamp="2025-09-30 00:31:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:31:59.422976992 +0000 UTC m=+1370.459226400" watchObservedRunningTime="2025-09-30 00:31:59.4323997 +0000 UTC m=+1370.468649108" Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.443203 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.068946414 podStartE2EDuration="6.443191155s" podCreationTimestamp="2025-09-30 00:31:53 +0000 UTC" firstStartedPulling="2025-09-30 00:31:55.094554557 +0000 UTC m=+1366.130803965" lastFinishedPulling="2025-09-30 00:31:58.468799298 +0000 UTC m=+1369.505048706" observedRunningTime="2025-09-30 00:31:59.440983795 +0000 UTC m=+1370.477233203" 
watchObservedRunningTime="2025-09-30 00:31:59.443191155 +0000 UTC m=+1370.479440563" Sep 30 00:31:59 crc kubenswrapper[4809]: I0930 00:31:59.465119 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.769433356 podStartE2EDuration="6.465101754s" podCreationTimestamp="2025-09-30 00:31:53 +0000 UTC" firstStartedPulling="2025-09-30 00:31:54.77237949 +0000 UTC m=+1365.808628898" lastFinishedPulling="2025-09-30 00:31:58.468047888 +0000 UTC m=+1369.504297296" observedRunningTime="2025-09-30 00:31:59.456108188 +0000 UTC m=+1370.492357596" watchObservedRunningTime="2025-09-30 00:31:59.465101754 +0000 UTC m=+1370.501351162" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.061456 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.085298 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8mk7\" (UniqueName: \"kubernetes.io/projected/4c748de1-fa7d-41b8-9a1c-5fb693750edc-kube-api-access-w8mk7\") pod \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.085530 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c748de1-fa7d-41b8-9a1c-5fb693750edc-logs\") pod \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.085725 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-config-data\") pod \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.085769 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle\") pod \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.085979 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c748de1-fa7d-41b8-9a1c-5fb693750edc-logs" (OuterVolumeSpecName: "logs") pod "4c748de1-fa7d-41b8-9a1c-5fb693750edc" (UID: "4c748de1-fa7d-41b8-9a1c-5fb693750edc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.086444 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c748de1-fa7d-41b8-9a1c-5fb693750edc-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.095929 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c748de1-fa7d-41b8-9a1c-5fb693750edc-kube-api-access-w8mk7" (OuterVolumeSpecName: "kube-api-access-w8mk7") pod "4c748de1-fa7d-41b8-9a1c-5fb693750edc" (UID: "4c748de1-fa7d-41b8-9a1c-5fb693750edc"). InnerVolumeSpecName "kube-api-access-w8mk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:00 crc kubenswrapper[4809]: E0930 00:32:00.116034 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle podName:4c748de1-fa7d-41b8-9a1c-5fb693750edc nodeName:}" failed. No retries permitted until 2025-09-30 00:32:00.615999187 +0000 UTC m=+1371.652248595 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle") pod "4c748de1-fa7d-41b8-9a1c-5fb693750edc" (UID: "4c748de1-fa7d-41b8-9a1c-5fb693750edc") : error deleting /var/lib/kubelet/pods/4c748de1-fa7d-41b8-9a1c-5fb693750edc/volume-subpaths: remove /var/lib/kubelet/pods/4c748de1-fa7d-41b8-9a1c-5fb693750edc/volume-subpaths: no such file or directory Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.122323 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-config-data" (OuterVolumeSpecName: "config-data") pod "4c748de1-fa7d-41b8-9a1c-5fb693750edc" (UID: "4c748de1-fa7d-41b8-9a1c-5fb693750edc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.189099 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.189372 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8mk7\" (UniqueName: \"kubernetes.io/projected/4c748de1-fa7d-41b8-9a1c-5fb693750edc-kube-api-access-w8mk7\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.383301 4809 generic.go:334] "Generic (PLEG): container finished" podID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerID="352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d" exitCode=0 Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.383336 4809 generic.go:334] "Generic (PLEG): container finished" podID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerID="a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e" exitCode=143 Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.383436 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c748de1-fa7d-41b8-9a1c-5fb693750edc","Type":"ContainerDied","Data":"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d"} Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.383476 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c748de1-fa7d-41b8-9a1c-5fb693750edc","Type":"ContainerDied","Data":"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e"} Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.383490 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c748de1-fa7d-41b8-9a1c-5fb693750edc","Type":"ContainerDied","Data":"3007833ac45cc1ab4a84f4ebedad54360e2e35b32b7efe2d784c00e5bbd79e9b"} Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.383508 4809 scope.go:117] "RemoveContainer" containerID="352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.386080 4809 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.413406 4809 scope.go:117] "RemoveContainer" containerID="a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.459855 4809 scope.go:117] "RemoveContainer" containerID="352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d" Sep 30 00:32:00 crc kubenswrapper[4809]: E0930 00:32:00.460333 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d\": container with ID starting with 352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d not found: ID does not exist" containerID="352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.460364 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d"} err="failed to get container status \"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d\": rpc error: code = NotFound desc = could not find container \"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d\": container with ID starting with 352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d not found: ID does not exist" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.460402 4809 scope.go:117] "RemoveContainer" containerID="a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e" Sep 30 00:32:00 crc kubenswrapper[4809]: E0930 00:32:00.461031 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e\": container with ID starting with a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e not found: ID does not exist" containerID="a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.461057 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e"} err="failed to get container status \"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e\": rpc error: code = NotFound desc = could not find container \"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e\": container with ID starting with a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e not found: ID does not exist" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.461075 4809 scope.go:117] "RemoveContainer" containerID="352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.461395 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d"} err="failed to get container status \"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d\": rpc error: code = NotFound desc = could not find container \"352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d\": container with ID starting with 352ddfdd2e9b1c76ca0956db9ccc53fa5edf5ec0ffb26d41f29e29d9eaf60a2d not found: ID does not exist" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 
00:32:00.461426 4809 scope.go:117] "RemoveContainer" containerID="a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.461943 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e"} err="failed to get container status \"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e\": rpc error: code = NotFound desc = could not find container \"a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e\": container with ID starting with a4b8125e5f380e3e4a2a673dc07bede603b6b2907445268b30b7ae8cc5a8d07e not found: ID does not exist" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.706529 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle\") pod \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\" (UID: \"4c748de1-fa7d-41b8-9a1c-5fb693750edc\") " Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.716806 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c748de1-fa7d-41b8-9a1c-5fb693750edc" (UID: "4c748de1-fa7d-41b8-9a1c-5fb693750edc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:00 crc kubenswrapper[4809]: I0930 00:32:00.808896 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c748de1-fa7d-41b8-9a1c-5fb693750edc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.044385 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.061235 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.075491 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:01 crc kubenswrapper[4809]: E0930 00:32:01.076160 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-log" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.076194 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-log" Sep 30 00:32:01 crc kubenswrapper[4809]: E0930 00:32:01.076230 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76dd894e-8637-49ec-953e-6e5aa1ee4e21" containerName="mariadb-account-create" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.076243 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="76dd894e-8637-49ec-953e-6e5aa1ee4e21" containerName="mariadb-account-create" Sep 30 00:32:01 crc kubenswrapper[4809]: E0930 00:32:01.077070 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-metadata" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.077117 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-metadata" Sep 30 00:32:01 crc kubenswrapper[4809]: E0930 00:32:01.077147 4809 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107d7545-41aa-471a-86e9-aa7e557f8faa" containerName="heat-api" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.077159 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="107d7545-41aa-471a-86e9-aa7e557f8faa" containerName="heat-api" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.077527 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-log" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.077564 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" containerName="nova-metadata-metadata" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.077582 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="76dd894e-8637-49ec-953e-6e5aa1ee4e21" containerName="mariadb-account-create" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.077624 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="107d7545-41aa-471a-86e9-aa7e557f8faa" containerName="heat-api" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.079374 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.083312 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.083467 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.115492 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.115568 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56e1259f-11d8-4421-84c7-ac3578a9aafb-logs\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.115591 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vksfg\" (UniqueName: \"kubernetes.io/projected/56e1259f-11d8-4421-84c7-ac3578a9aafb-kube-api-access-vksfg\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.115690 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-config-data\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.115710 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " 
pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.116398 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.221719 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-config-data\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.221780 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.221918 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.222024 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56e1259f-11d8-4421-84c7-ac3578a9aafb-logs\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.223802 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vksfg\" (UniqueName: \"kubernetes.io/projected/56e1259f-11d8-4421-84c7-ac3578a9aafb-kube-api-access-vksfg\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.222720 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56e1259f-11d8-4421-84c7-ac3578a9aafb-logs\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.226174 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.226961 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-config-data\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.227431 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.246519 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vksfg\" (UniqueName: \"kubernetes.io/projected/56e1259f-11d8-4421-84c7-ac3578a9aafb-kube-api-access-vksfg\") pod \"nova-metadata-0\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.426940 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.708311 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c748de1-fa7d-41b8-9a1c-5fb693750edc" path="/var/lib/kubelet/pods/4c748de1-fa7d-41b8-9a1c-5fb693750edc/volumes" Sep 30 00:32:01 crc kubenswrapper[4809]: I0930 00:32:01.923512 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:02 crc kubenswrapper[4809]: I0930 00:32:02.408012 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56e1259f-11d8-4421-84c7-ac3578a9aafb","Type":"ContainerStarted","Data":"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360"} Sep 30 00:32:02 crc kubenswrapper[4809]: I0930 00:32:02.408320 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56e1259f-11d8-4421-84c7-ac3578a9aafb","Type":"ContainerStarted","Data":"c4bd2623f5265d7b38c62eba3a4cec5e92d068cdab6a1604e3d54279ac917e09"} Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.381582 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-2bnnc"] Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.383753 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.387534 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.387719 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.387907 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-vzdp5" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.397515 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-2bnnc"] Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.437547 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56e1259f-11d8-4421-84c7-ac3578a9aafb","Type":"ContainerStarted","Data":"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4"} Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.439980 4809 generic.go:334] "Generic (PLEG): container finished" podID="14b3d277-31b4-4f12-b360-aeabe9420f33" containerID="242d241e4e0d2e4694ea04f4fb50d749755a51ed502d97869e24c0b2218f2fee" exitCode=0 Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.440040 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cvhd7" event={"ID":"14b3d277-31b4-4f12-b360-aeabe9420f33","Type":"ContainerDied","Data":"242d241e4e0d2e4694ea04f4fb50d749755a51ed502d97869e24c0b2218f2fee"} Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.466577 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.4665452820000002 podStartE2EDuration="2.466545282s" podCreationTimestamp="2025-09-30 00:32:01 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:03.458470551 +0000 UTC m=+1374.494719969" watchObservedRunningTime="2025-09-30 00:32:03.466545282 +0000 UTC m=+1374.502794690" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.475795 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-combined-ca-bundle\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.475939 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfjpv\" (UniqueName: \"kubernetes.io/projected/1120e47a-9490-497a-a79b-aa2941e070f7-kube-api-access-tfjpv\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.476120 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-config-data\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.476262 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-scripts\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.578492 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-scripts\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.578564 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-combined-ca-bundle\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.578588 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfjpv\" (UniqueName: \"kubernetes.io/projected/1120e47a-9490-497a-a79b-aa2941e070f7-kube-api-access-tfjpv\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.578695 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-config-data\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.585916 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-config-data\") pod \"aodh-db-sync-2bnnc\" 
(UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.590859 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-combined-ca-bundle\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.601396 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-scripts\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.615352 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfjpv\" (UniqueName: \"kubernetes.io/projected/1120e47a-9490-497a-a79b-aa2941e070f7-kube-api-access-tfjpv\") pod \"aodh-db-sync-2bnnc\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.626818 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.626863 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.661160 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 00:32:03 crc kubenswrapper[4809]: I0930 00:32:03.717988 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.018036 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.018342 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.058856 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.112220 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.191560 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f6bc4c6c9-r57b4"] Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.191863 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerName="dnsmasq-dns" containerID="cri-o://c7e4488d8ae93abb023004d909c95e94370565ae43d03fd660092d4e7c823dcd" gracePeriod=10 Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.343853 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-2bnnc"] Sep 30 00:32:04 crc kubenswrapper[4809]: W0930 00:32:04.358144 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1120e47a_9490_497a_a79b_aa2941e070f7.slice/crio-00a56c1a7ad60c64534e9b666aa9180217f87b112a81730bb846869a75289cae WatchSource:0}: Error finding container 00a56c1a7ad60c64534e9b666aa9180217f87b112a81730bb846869a75289cae: Status 404 returned error can't find the container with id 00a56c1a7ad60c64534e9b666aa9180217f87b112a81730bb846869a75289cae Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.466003 4809 generic.go:334] "Generic (PLEG): container finished" podID="e2f48302-4af1-49a1-948a-086de787a0c4" containerID="eab730e58276147ed70d915f0c84500f12b6dca67d97368f0c7b708c7394f3c8" exitCode=0 Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.466097 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zvndc" event={"ID":"e2f48302-4af1-49a1-948a-086de787a0c4","Type":"ContainerDied","Data":"eab730e58276147ed70d915f0c84500f12b6dca67d97368f0c7b708c7394f3c8"} Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.471969 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2bnnc" event={"ID":"1120e47a-9490-497a-a79b-aa2941e070f7","Type":"ContainerStarted","Data":"00a56c1a7ad60c64534e9b666aa9180217f87b112a81730bb846869a75289cae"} Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.476901 4809 generic.go:334] "Generic (PLEG): container finished" podID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerID="c7e4488d8ae93abb023004d909c95e94370565ae43d03fd660092d4e7c823dcd" exitCode=0 Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.477192 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" event={"ID":"f585e9ed-4d32-49f6-84af-87ba0d4093d9","Type":"ContainerDied","Data":"c7e4488d8ae93abb023004d909c95e94370565ae43d03fd660092d4e7c823dcd"} Sep 30 00:32:04 crc kubenswrapper[4809]: I0930 00:32:04.530301 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-scheduler-0" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.111840 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.219:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.112666 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.219:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.180627 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.217620 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-nb\") pod \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.217669 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-swift-storage-0\") pod \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.217857 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8wqh\" (UniqueName: \"kubernetes.io/projected/f585e9ed-4d32-49f6-84af-87ba0d4093d9-kube-api-access-g8wqh\") pod \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.217908 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-config\") pod \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.217926 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-sb\") pod \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.217983 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-svc\") pod \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\" (UID: \"f585e9ed-4d32-49f6-84af-87ba0d4093d9\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.227465 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f585e9ed-4d32-49f6-84af-87ba0d4093d9-kube-api-access-g8wqh" (OuterVolumeSpecName: "kube-api-access-g8wqh") pod "f585e9ed-4d32-49f6-84af-87ba0d4093d9" (UID: "f585e9ed-4d32-49f6-84af-87ba0d4093d9"). InnerVolumeSpecName "kube-api-access-g8wqh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.324419 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8wqh\" (UniqueName: \"kubernetes.io/projected/f585e9ed-4d32-49f6-84af-87ba0d4093d9-kube-api-access-g8wqh\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.340707 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-config" (OuterVolumeSpecName: "config") pod "f585e9ed-4d32-49f6-84af-87ba0d4093d9" (UID: "f585e9ed-4d32-49f6-84af-87ba0d4093d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.346096 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f585e9ed-4d32-49f6-84af-87ba0d4093d9" (UID: "f585e9ed-4d32-49f6-84af-87ba0d4093d9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.346316 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.374880 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f585e9ed-4d32-49f6-84af-87ba0d4093d9" (UID: "f585e9ed-4d32-49f6-84af-87ba0d4093d9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.383047 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f585e9ed-4d32-49f6-84af-87ba0d4093d9" (UID: "f585e9ed-4d32-49f6-84af-87ba0d4093d9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.389925 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f585e9ed-4d32-49f6-84af-87ba0d4093d9" (UID: "f585e9ed-4d32-49f6-84af-87ba0d4093d9"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.425887 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-combined-ca-bundle\") pod \"14b3d277-31b4-4f12-b360-aeabe9420f33\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.425933 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk9p2\" (UniqueName: \"kubernetes.io/projected/14b3d277-31b4-4f12-b360-aeabe9420f33-kube-api-access-gk9p2\") pod \"14b3d277-31b4-4f12-b360-aeabe9420f33\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.428423 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-scripts\") pod \"14b3d277-31b4-4f12-b360-aeabe9420f33\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.428679 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-config-data\") pod \"14b3d277-31b4-4f12-b360-aeabe9420f33\" (UID: \"14b3d277-31b4-4f12-b360-aeabe9420f33\") " Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.430903 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.430927 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.430937 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.430947 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.430955 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f585e9ed-4d32-49f6-84af-87ba0d4093d9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.431289 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14b3d277-31b4-4f12-b360-aeabe9420f33-kube-api-access-gk9p2" (OuterVolumeSpecName: "kube-api-access-gk9p2") pod "14b3d277-31b4-4f12-b360-aeabe9420f33" (UID: "14b3d277-31b4-4f12-b360-aeabe9420f33"). InnerVolumeSpecName "kube-api-access-gk9p2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.439207 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-scripts" (OuterVolumeSpecName: "scripts") pod "14b3d277-31b4-4f12-b360-aeabe9420f33" (UID: "14b3d277-31b4-4f12-b360-aeabe9420f33"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.470931 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14b3d277-31b4-4f12-b360-aeabe9420f33" (UID: "14b3d277-31b4-4f12-b360-aeabe9420f33"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.483035 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-config-data" (OuterVolumeSpecName: "config-data") pod "14b3d277-31b4-4f12-b360-aeabe9420f33" (UID: "14b3d277-31b4-4f12-b360-aeabe9420f33"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.521758 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-cvhd7" event={"ID":"14b3d277-31b4-4f12-b360-aeabe9420f33","Type":"ContainerDied","Data":"f5aa4d20478facf2121187b59b487c7d296293529b510859c6f62d4f0542fa1c"} Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.521832 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5aa4d20478facf2121187b59b487c7d296293529b510859c6f62d4f0542fa1c" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.521857 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-cvhd7" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.527385 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.527632 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bc4c6c9-r57b4" event={"ID":"f585e9ed-4d32-49f6-84af-87ba0d4093d9","Type":"ContainerDied","Data":"a71a857249ad0fb47eb3aa5c7e6039b700e4741c895328ad8ffe6620b931a07f"} Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.527718 4809 scope.go:117] "RemoveContainer" containerID="c7e4488d8ae93abb023004d909c95e94370565ae43d03fd660092d4e7c823dcd" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.544357 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.544414 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.544432 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk9p2\" (UniqueName: \"kubernetes.io/projected/14b3d277-31b4-4f12-b360-aeabe9420f33-kube-api-access-gk9p2\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.544445 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14b3d277-31b4-4f12-b360-aeabe9420f33-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.591065 4809 scope.go:117] "RemoveContainer" containerID="282071e58843d368f5fc2832bb70e1bdaf78a98cda8e656caab4a77656287b8b" Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.752423 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.752474 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f6bc4c6c9-r57b4"] Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.752783 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-log" containerID="cri-o://064e18ec859c753d0fe1315b1dded864bb4f2e2da08807c46c647370168035b2" gracePeriod=30 Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.753168 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-api" containerID="cri-o://953f6ece01dae039e25ba7681a76b7c011daf87cedeaf43179abe74dc5f2912f" gracePeriod=30 Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.755724 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.755986 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-log" containerID="cri-o://b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360" gracePeriod=30 Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.756159 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-metadata" 
containerID="cri-o://b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4" gracePeriod=30 Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.773758 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f6bc4c6c9-r57b4"] Sep 30 00:32:05 crc kubenswrapper[4809]: I0930 00:32:05.796373 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.087992 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.161617 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-config-data\") pod \"e2f48302-4af1-49a1-948a-086de787a0c4\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.161805 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-combined-ca-bundle\") pod \"e2f48302-4af1-49a1-948a-086de787a0c4\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.162025 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7x8x\" (UniqueName: \"kubernetes.io/projected/e2f48302-4af1-49a1-948a-086de787a0c4-kube-api-access-d7x8x\") pod \"e2f48302-4af1-49a1-948a-086de787a0c4\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.162082 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-scripts\") pod \"e2f48302-4af1-49a1-948a-086de787a0c4\" (UID: \"e2f48302-4af1-49a1-948a-086de787a0c4\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.167913 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2f48302-4af1-49a1-948a-086de787a0c4-kube-api-access-d7x8x" (OuterVolumeSpecName: "kube-api-access-d7x8x") pod "e2f48302-4af1-49a1-948a-086de787a0c4" (UID: "e2f48302-4af1-49a1-948a-086de787a0c4"). InnerVolumeSpecName "kube-api-access-d7x8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.170232 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-scripts" (OuterVolumeSpecName: "scripts") pod "e2f48302-4af1-49a1-948a-086de787a0c4" (UID: "e2f48302-4af1-49a1-948a-086de787a0c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.195066 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2f48302-4af1-49a1-948a-086de787a0c4" (UID: "e2f48302-4af1-49a1-948a-086de787a0c4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.224904 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-config-data" (OuterVolumeSpecName: "config-data") pod "e2f48302-4af1-49a1-948a-086de787a0c4" (UID: "e2f48302-4af1-49a1-948a-086de787a0c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.265989 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.266344 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.266382 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7x8x\" (UniqueName: \"kubernetes.io/projected/e2f48302-4af1-49a1-948a-086de787a0c4-kube-api-access-d7x8x\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.266391 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f48302-4af1-49a1-948a-086de787a0c4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.427741 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.427854 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.445944 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.473624 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56e1259f-11d8-4421-84c7-ac3578a9aafb-logs\") pod \"56e1259f-11d8-4421-84c7-ac3578a9aafb\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.473693 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-config-data\") pod \"56e1259f-11d8-4421-84c7-ac3578a9aafb\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.473753 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-nova-metadata-tls-certs\") pod \"56e1259f-11d8-4421-84c7-ac3578a9aafb\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.474018 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vksfg\" (UniqueName: \"kubernetes.io/projected/56e1259f-11d8-4421-84c7-ac3578a9aafb-kube-api-access-vksfg\") pod \"56e1259f-11d8-4421-84c7-ac3578a9aafb\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.474135 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-combined-ca-bundle\") pod \"56e1259f-11d8-4421-84c7-ac3578a9aafb\" (UID: \"56e1259f-11d8-4421-84c7-ac3578a9aafb\") " Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.484016 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e1259f-11d8-4421-84c7-ac3578a9aafb-kube-api-access-vksfg" (OuterVolumeSpecName: "kube-api-access-vksfg") pod "56e1259f-11d8-4421-84c7-ac3578a9aafb" (UID: "56e1259f-11d8-4421-84c7-ac3578a9aafb"). InnerVolumeSpecName "kube-api-access-vksfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.484291 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e1259f-11d8-4421-84c7-ac3578a9aafb-logs" (OuterVolumeSpecName: "logs") pod "56e1259f-11d8-4421-84c7-ac3578a9aafb" (UID: "56e1259f-11d8-4421-84c7-ac3578a9aafb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.526302 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56e1259f-11d8-4421-84c7-ac3578a9aafb" (UID: "56e1259f-11d8-4421-84c7-ac3578a9aafb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.580183 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vksfg\" (UniqueName: \"kubernetes.io/projected/56e1259f-11d8-4421-84c7-ac3578a9aafb-kube-api-access-vksfg\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.580228 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.580241 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56e1259f-11d8-4421-84c7-ac3578a9aafb-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.584130 4809 generic.go:334] "Generic (PLEG): container finished" podID="8d59630e-01bf-4c23-b523-85e077e9d828" containerID="064e18ec859c753d0fe1315b1dded864bb4f2e2da08807c46c647370168035b2" exitCode=143 Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.584275 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d59630e-01bf-4c23-b523-85e077e9d828","Type":"ContainerDied","Data":"064e18ec859c753d0fe1315b1dded864bb4f2e2da08807c46c647370168035b2"} Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.603876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zvndc" event={"ID":"e2f48302-4af1-49a1-948a-086de787a0c4","Type":"ContainerDied","Data":"e0c05eb9b06441d036f4e709ba3febfc19f6fef77b95d6ab209e2a8338c1bd72"} Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.603918 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0c05eb9b06441d036f4e709ba3febfc19f6fef77b95d6ab209e2a8338c1bd72" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.603977 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zvndc" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.612522 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.613006 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-metadata" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613020 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-metadata" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.613037 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2f48302-4af1-49a1-948a-086de787a0c4" containerName="nova-cell1-conductor-db-sync" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613045 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2f48302-4af1-49a1-948a-086de787a0c4" containerName="nova-cell1-conductor-db-sync" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.613057 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerName="dnsmasq-dns" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613063 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerName="dnsmasq-dns" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.613075 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14b3d277-31b4-4f12-b360-aeabe9420f33" containerName="nova-manage" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613082 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="14b3d277-31b4-4f12-b360-aeabe9420f33" containerName="nova-manage" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.613088 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerName="init" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613094 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerName="init" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.613125 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-log" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613131 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-log" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613364 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2f48302-4af1-49a1-948a-086de787a0c4" containerName="nova-cell1-conductor-db-sync" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613378 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="14b3d277-31b4-4f12-b360-aeabe9420f33" containerName="nova-manage" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613390 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" containerName="dnsmasq-dns" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613406 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-metadata" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.613417 4809 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerName="nova-metadata-log" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.614166 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618689 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-config-data" (OuterVolumeSpecName: "config-data") pod "56e1259f-11d8-4421-84c7-ac3578a9aafb" (UID: "56e1259f-11d8-4421-84c7-ac3578a9aafb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618755 4809 generic.go:334] "Generic (PLEG): container finished" podID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerID="b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4" exitCode=0 Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618773 4809 generic.go:334] "Generic (PLEG): container finished" podID="56e1259f-11d8-4421-84c7-ac3578a9aafb" containerID="b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360" exitCode=143 Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618811 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56e1259f-11d8-4421-84c7-ac3578a9aafb","Type":"ContainerDied","Data":"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4"} Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618831 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56e1259f-11d8-4421-84c7-ac3578a9aafb","Type":"ContainerDied","Data":"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360"} Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618837 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618851 4809 scope.go:117] "RemoveContainer" containerID="b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.618842 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56e1259f-11d8-4421-84c7-ac3578a9aafb","Type":"ContainerDied","Data":"c4bd2623f5265d7b38c62eba3a4cec5e92d068cdab6a1604e3d54279ac917e09"} Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.629954 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.643203 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="067ed8b2-f745-47ad-b207-5ac441bc7510" containerName="nova-scheduler-scheduler" containerID="cri-o://f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" gracePeriod=30 Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.664956 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "56e1259f-11d8-4421-84c7-ac3578a9aafb" (UID: "56e1259f-11d8-4421-84c7-ac3578a9aafb"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.667765 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.683248 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b789d99-904d-412c-9c51-1e40f767385e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.683473 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b789d99-904d-412c-9c51-1e40f767385e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.683509 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfv7v\" (UniqueName: \"kubernetes.io/projected/5b789d99-904d-412c-9c51-1e40f767385e-kube-api-access-kfv7v\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.683770 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.683809 4809 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56e1259f-11d8-4421-84c7-ac3578a9aafb-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.768882 4809 scope.go:117] "RemoveContainer" containerID="b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.785977 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b789d99-904d-412c-9c51-1e40f767385e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.786834 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b789d99-904d-412c-9c51-1e40f767385e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.786879 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfv7v\" (UniqueName: \"kubernetes.io/projected/5b789d99-904d-412c-9c51-1e40f767385e-kube-api-access-kfv7v\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.789872 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b789d99-904d-412c-9c51-1e40f767385e-config-data\") pod \"nova-cell1-conductor-0\" (UID: 
\"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.792458 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b789d99-904d-412c-9c51-1e40f767385e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.805170 4809 scope.go:117] "RemoveContainer" containerID="b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.807295 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4\": container with ID starting with b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4 not found: ID does not exist" containerID="b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.807338 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4"} err="failed to get container status \"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4\": rpc error: code = NotFound desc = could not find container \"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4\": container with ID starting with b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4 not found: ID does not exist" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.807363 4809 scope.go:117] "RemoveContainer" containerID="b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360" Sep 30 00:32:06 crc kubenswrapper[4809]: E0930 00:32:06.807614 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360\": container with ID starting with b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360 not found: ID does not exist" containerID="b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.807651 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360"} err="failed to get container status \"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360\": rpc error: code = NotFound desc = could not find container \"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360\": container with ID starting with b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360 not found: ID does not exist" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.807664 4809 scope.go:117] "RemoveContainer" containerID="b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.807821 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4"} err="failed to get container status \"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4\": rpc error: code = NotFound desc = could not find container 
\"b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4\": container with ID starting with b3f950384c3f35ac10efb7e6a0ccaa12a690930e8beaaca1c5930063cc8e7ce4 not found: ID does not exist" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.807840 4809 scope.go:117] "RemoveContainer" containerID="b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.808008 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360"} err="failed to get container status \"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360\": rpc error: code = NotFound desc = could not find container \"b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360\": container with ID starting with b2d38a397257351b89d27d2e709b225bd98ab70357db65a57247ecfa0b552360 not found: ID does not exist" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.817457 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfv7v\" (UniqueName: \"kubernetes.io/projected/5b789d99-904d-412c-9c51-1e40f767385e-kube-api-access-kfv7v\") pod \"nova-cell1-conductor-0\" (UID: \"5b789d99-904d-412c-9c51-1e40f767385e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.967807 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:06 crc kubenswrapper[4809]: I0930 00:32:06.984176 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.000715 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.019285 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.019440 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.025065 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.025151 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.045831 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.095415 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-logs\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.095525 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmbqd\" (UniqueName: \"kubernetes.io/projected/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-kube-api-access-gmbqd\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.095602 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.095723 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.095776 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-config-data\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.197633 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-config-data\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.197798 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-logs\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.197832 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmbqd\" (UniqueName: \"kubernetes.io/projected/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-kube-api-access-gmbqd\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.197872 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.197913 4809 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.199188 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-logs\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.202843 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.202958 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-config-data\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.368289 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.380306 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmbqd\" (UniqueName: \"kubernetes.io/projected/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-kube-api-access-gmbqd\") pod \"nova-metadata-0\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.585487 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.646363 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.656870 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5b789d99-904d-412c-9c51-1e40f767385e","Type":"ContainerStarted","Data":"ed653a9e35e8735fb33ea210232aa8cf5bbcf21c81864556e6f70af244bfc1bd"} Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.803028 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e1259f-11d8-4421-84c7-ac3578a9aafb" path="/var/lib/kubelet/pods/56e1259f-11d8-4421-84c7-ac3578a9aafb/volumes" Sep 30 00:32:07 crc kubenswrapper[4809]: I0930 00:32:07.804371 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f585e9ed-4d32-49f6-84af-87ba0d4093d9" path="/var/lib/kubelet/pods/f585e9ed-4d32-49f6-84af-87ba0d4093d9/volumes" Sep 30 00:32:08 crc kubenswrapper[4809]: I0930 00:32:08.137098 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:08 crc kubenswrapper[4809]: E0930 00:32:08.629162 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 00:32:08 crc kubenswrapper[4809]: E0930 00:32:08.631179 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 00:32:08 crc kubenswrapper[4809]: E0930 00:32:08.632674 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 00:32:08 crc kubenswrapper[4809]: E0930 00:32:08.632706 4809 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="067ed8b2-f745-47ad-b207-5ac441bc7510" containerName="nova-scheduler-scheduler" Sep 30 00:32:08 crc kubenswrapper[4809]: I0930 00:32:08.673269 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5b789d99-904d-412c-9c51-1e40f767385e","Type":"ContainerStarted","Data":"4b51d85d0f690fde6b559f79c482e5f235e0bb38b0e4e50ec4b383cdaf219f09"} Sep 30 00:32:08 crc kubenswrapper[4809]: I0930 00:32:08.673396 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:08 crc kubenswrapper[4809]: I0930 00:32:08.675058 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9","Type":"ContainerStarted","Data":"2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da"} Sep 30 00:32:08 crc kubenswrapper[4809]: I0930 00:32:08.675086 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9","Type":"ContainerStarted","Data":"4471cd95b00fd278801daa9d008d347325d2d01e1fa790af34743bcb0ee8d678"} Sep 30 00:32:08 crc kubenswrapper[4809]: I0930 00:32:08.692102 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.6920811110000002 podStartE2EDuration="2.692081111s" podCreationTimestamp="2025-09-30 00:32:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:08.686323133 +0000 UTC m=+1379.722572541" watchObservedRunningTime="2025-09-30 00:32:08.692081111 +0000 UTC m=+1379.728330529" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.593833 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.658305 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-config-data\") pod \"067ed8b2-f745-47ad-b207-5ac441bc7510\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.658402 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clklh\" (UniqueName: \"kubernetes.io/projected/067ed8b2-f745-47ad-b207-5ac441bc7510-kube-api-access-clklh\") pod \"067ed8b2-f745-47ad-b207-5ac441bc7510\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.658487 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle\") pod \"067ed8b2-f745-47ad-b207-5ac441bc7510\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.673040 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/067ed8b2-f745-47ad-b207-5ac441bc7510-kube-api-access-clklh" (OuterVolumeSpecName: "kube-api-access-clklh") pod "067ed8b2-f745-47ad-b207-5ac441bc7510" (UID: "067ed8b2-f745-47ad-b207-5ac441bc7510"). InnerVolumeSpecName "kube-api-access-clklh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:11 crc kubenswrapper[4809]: E0930 00:32:11.695088 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle podName:067ed8b2-f745-47ad-b207-5ac441bc7510 nodeName:}" failed. No retries permitted until 2025-09-30 00:32:12.195056882 +0000 UTC m=+1383.231306300 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle") pod "067ed8b2-f745-47ad-b207-5ac441bc7510" (UID: "067ed8b2-f745-47ad-b207-5ac441bc7510") : error deleting /var/lib/kubelet/pods/067ed8b2-f745-47ad-b207-5ac441bc7510/volume-subpaths: remove /var/lib/kubelet/pods/067ed8b2-f745-47ad-b207-5ac441bc7510/volume-subpaths: no such file or directory Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.704725 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-config-data" (OuterVolumeSpecName: "config-data") pod "067ed8b2-f745-47ad-b207-5ac441bc7510" (UID: "067ed8b2-f745-47ad-b207-5ac441bc7510"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.749944 4809 generic.go:334] "Generic (PLEG): container finished" podID="067ed8b2-f745-47ad-b207-5ac441bc7510" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" exitCode=0 Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.750116 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.754354 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"067ed8b2-f745-47ad-b207-5ac441bc7510","Type":"ContainerDied","Data":"f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a"} Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.754399 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"067ed8b2-f745-47ad-b207-5ac441bc7510","Type":"ContainerDied","Data":"10642a01c54e1b702bd173441f05bc9ffa1c025a6730740dc9fe2a16e555566a"} Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.754419 4809 scope.go:117] "RemoveContainer" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.763769 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.763795 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clklh\" (UniqueName: \"kubernetes.io/projected/067ed8b2-f745-47ad-b207-5ac441bc7510-kube-api-access-clklh\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.785949 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.787109 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9","Type":"ContainerStarted","Data":"09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a"} Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.797565 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2bnnc" event={"ID":"1120e47a-9490-497a-a79b-aa2941e070f7","Type":"ContainerStarted","Data":"38d4a99bab5f95a3b5960d15610683ae25f67ff0350f9ea36cae626455c2a0c9"} Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.803268 4809 generic.go:334] "Generic (PLEG): container finished" podID="8d59630e-01bf-4c23-b523-85e077e9d828" containerID="953f6ece01dae039e25ba7681a76b7c011daf87cedeaf43179abe74dc5f2912f" exitCode=0 Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.803314 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d59630e-01bf-4c23-b523-85e077e9d828","Type":"ContainerDied","Data":"953f6ece01dae039e25ba7681a76b7c011daf87cedeaf43179abe74dc5f2912f"} Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.803379 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.827753 4809 scope.go:117] "RemoveContainer" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" Sep 30 00:32:11 crc kubenswrapper[4809]: E0930 00:32:11.828995 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a\": container with ID starting with f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a not found: ID does not exist" containerID="f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.829046 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a"} err="failed to get container status \"f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a\": rpc error: code = NotFound desc = could not find container \"f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a\": container with ID starting with f3759933b8497ce5f405fe055122ae39f258eda904bb478b0c3e7b1d72a8849a not found: ID does not exist" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.829073 4809 scope.go:117] "RemoveContainer" containerID="953f6ece01dae039e25ba7681a76b7c011daf87cedeaf43179abe74dc5f2912f" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.838174 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=5.838154064 podStartE2EDuration="5.838154064s" podCreationTimestamp="2025-09-30 00:32:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:11.830237728 +0000 UTC m=+1382.866487136" watchObservedRunningTime="2025-09-30 00:32:11.838154064 +0000 UTC m=+1382.874403472" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.853530 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-2bnnc" 
podStartSLOduration=2.05083804 podStartE2EDuration="8.853507263s" podCreationTimestamp="2025-09-30 00:32:03 +0000 UTC" firstStartedPulling="2025-09-30 00:32:04.361347612 +0000 UTC m=+1375.397597010" lastFinishedPulling="2025-09-30 00:32:11.164016825 +0000 UTC m=+1382.200266233" observedRunningTime="2025-09-30 00:32:11.851196811 +0000 UTC m=+1382.887446219" watchObservedRunningTime="2025-09-30 00:32:11.853507263 +0000 UTC m=+1382.889756671" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.878436 4809 scope.go:117] "RemoveContainer" containerID="064e18ec859c753d0fe1315b1dded864bb4f2e2da08807c46c647370168035b2" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.967527 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psjxk\" (UniqueName: \"kubernetes.io/projected/8d59630e-01bf-4c23-b523-85e077e9d828-kube-api-access-psjxk\") pod \"8d59630e-01bf-4c23-b523-85e077e9d828\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.967823 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d59630e-01bf-4c23-b523-85e077e9d828-logs\") pod \"8d59630e-01bf-4c23-b523-85e077e9d828\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.967855 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-combined-ca-bundle\") pod \"8d59630e-01bf-4c23-b523-85e077e9d828\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.967876 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-config-data\") pod \"8d59630e-01bf-4c23-b523-85e077e9d828\" (UID: \"8d59630e-01bf-4c23-b523-85e077e9d828\") " Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.969673 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d59630e-01bf-4c23-b523-85e077e9d828-logs" (OuterVolumeSpecName: "logs") pod "8d59630e-01bf-4c23-b523-85e077e9d828" (UID: "8d59630e-01bf-4c23-b523-85e077e9d828"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:11 crc kubenswrapper[4809]: I0930 00:32:11.971836 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d59630e-01bf-4c23-b523-85e077e9d828-kube-api-access-psjxk" (OuterVolumeSpecName: "kube-api-access-psjxk") pod "8d59630e-01bf-4c23-b523-85e077e9d828" (UID: "8d59630e-01bf-4c23-b523-85e077e9d828"). InnerVolumeSpecName "kube-api-access-psjxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.002129 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d59630e-01bf-4c23-b523-85e077e9d828" (UID: "8d59630e-01bf-4c23-b523-85e077e9d828"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.004115 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-config-data" (OuterVolumeSpecName: "config-data") pod "8d59630e-01bf-4c23-b523-85e077e9d828" (UID: "8d59630e-01bf-4c23-b523-85e077e9d828"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.070044 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.070092 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d59630e-01bf-4c23-b523-85e077e9d828-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.070107 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psjxk\" (UniqueName: \"kubernetes.io/projected/8d59630e-01bf-4c23-b523-85e077e9d828-kube-api-access-psjxk\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.070122 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d59630e-01bf-4c23-b523-85e077e9d828-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.213906 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.235511 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.269417 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: E0930 00:32:12.270806 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-log" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.270837 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-log" Sep 30 00:32:12 crc kubenswrapper[4809]: E0930 00:32:12.270875 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="067ed8b2-f745-47ad-b207-5ac441bc7510" containerName="nova-scheduler-scheduler" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.270884 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="067ed8b2-f745-47ad-b207-5ac441bc7510" containerName="nova-scheduler-scheduler" Sep 30 00:32:12 crc kubenswrapper[4809]: E0930 00:32:12.270922 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-api" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.270930 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-api" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.271178 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-log" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.271211 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="067ed8b2-f745-47ad-b207-5ac441bc7510" 
containerName="nova-scheduler-scheduler" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.271227 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" containerName="nova-api-api" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.272780 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.273590 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle\") pod \"067ed8b2-f745-47ad-b207-5ac441bc7510\" (UID: \"067ed8b2-f745-47ad-b207-5ac441bc7510\") " Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.278037 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.279242 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "067ed8b2-f745-47ad-b207-5ac441bc7510" (UID: "067ed8b2-f745-47ad-b207-5ac441bc7510"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.284400 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.378121 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067ed8b2-f745-47ad-b207-5ac441bc7510-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.382193 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.390931 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.405336 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.407662 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.409632 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.415883 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.450320 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.481410 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-config-data\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.481525 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c10753d-9437-47e0-b504-a2fede89973d-logs\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.481569 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7skv\" (UniqueName: \"kubernetes.io/projected/7c10753d-9437-47e0-b504-a2fede89973d-kube-api-access-s7skv\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.481717 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.582898 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-config-data\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.582974 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-config-data\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.583010 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x6g9\" (UniqueName: \"kubernetes.io/projected/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-kube-api-access-2x6g9\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.583036 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c10753d-9437-47e0-b504-a2fede89973d-logs\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.583055 4809 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.583078 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7skv\" (UniqueName: \"kubernetes.io/projected/7c10753d-9437-47e0-b504-a2fede89973d-kube-api-access-s7skv\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.583304 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.583514 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c10753d-9437-47e0-b504-a2fede89973d-logs\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.592195 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-config-data\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.593240 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.598524 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7skv\" (UniqueName: \"kubernetes.io/projected/7c10753d-9437-47e0-b504-a2fede89973d-kube-api-access-s7skv\") pod \"nova-api-0\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.638198 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.646578 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.646621 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.684762 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x6g9\" (UniqueName: \"kubernetes.io/projected/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-kube-api-access-2x6g9\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.684851 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.684953 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-config-data\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.692396 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-config-data\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.701698 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.710209 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x6g9\" (UniqueName: \"kubernetes.io/projected/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-kube-api-access-2x6g9\") pod \"nova-scheduler-0\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:12 crc kubenswrapper[4809]: I0930 00:32:12.724109 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:13 crc kubenswrapper[4809]: W0930 00:32:13.153350 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c10753d_9437_47e0_b504_a2fede89973d.slice/crio-db705989ef545346ea5c6bf5a4dbf049f7c2df6e8e540085ece2ab862947c655 WatchSource:0}: Error finding container db705989ef545346ea5c6bf5a4dbf049f7c2df6e8e540085ece2ab862947c655: Status 404 returned error can't find the container with id db705989ef545346ea5c6bf5a4dbf049f7c2df6e8e540085ece2ab862947c655 Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.156225 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.299347 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:13 crc kubenswrapper[4809]: W0930 00:32:13.305256 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa11667b_2b05_4bfb_a508_c7e8d3b71bf3.slice/crio-650a827ee309a1cf392f4a5c31235a4cfa5a7f568e0d4663aebf17d7aedbe182 WatchSource:0}: Error finding container 650a827ee309a1cf392f4a5c31235a4cfa5a7f568e0d4663aebf17d7aedbe182: Status 404 returned error can't find the container with id 650a827ee309a1cf392f4a5c31235a4cfa5a7f568e0d4663aebf17d7aedbe182 Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.704903 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="067ed8b2-f745-47ad-b207-5ac441bc7510" path="/var/lib/kubelet/pods/067ed8b2-f745-47ad-b207-5ac441bc7510/volumes" Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.705683 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d59630e-01bf-4c23-b523-85e077e9d828" path="/var/lib/kubelet/pods/8d59630e-01bf-4c23-b523-85e077e9d828/volumes" Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.869902 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3","Type":"ContainerStarted","Data":"3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905"} Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.869986 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3","Type":"ContainerStarted","Data":"650a827ee309a1cf392f4a5c31235a4cfa5a7f568e0d4663aebf17d7aedbe182"} Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.874416 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c10753d-9437-47e0-b504-a2fede89973d","Type":"ContainerStarted","Data":"9839bbad7adaf281459d5fcb34dc9bca4f9c3d9850cc206c91a548b2affcf14c"} Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.874471 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c10753d-9437-47e0-b504-a2fede89973d","Type":"ContainerStarted","Data":"2480510bd0ba09b7f7e5f6b92445258bef1db33916f7de04d87e6422ce439a4f"} Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.874481 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c10753d-9437-47e0-b504-a2fede89973d","Type":"ContainerStarted","Data":"db705989ef545346ea5c6bf5a4dbf049f7c2df6e8e540085ece2ab862947c655"} Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.888666 4809 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.888627388 podStartE2EDuration="1.888627388s" podCreationTimestamp="2025-09-30 00:32:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:13.884588687 +0000 UTC m=+1384.920838095" watchObservedRunningTime="2025-09-30 00:32:13.888627388 +0000 UTC m=+1384.924876796" Sep 30 00:32:13 crc kubenswrapper[4809]: I0930 00:32:13.901998 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.901978643 podStartE2EDuration="1.901978643s" podCreationTimestamp="2025-09-30 00:32:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:13.899345121 +0000 UTC m=+1384.935594529" watchObservedRunningTime="2025-09-30 00:32:13.901978643 +0000 UTC m=+1384.938228051" Sep 30 00:32:14 crc kubenswrapper[4809]: I0930 00:32:14.887408 4809 generic.go:334] "Generic (PLEG): container finished" podID="1120e47a-9490-497a-a79b-aa2941e070f7" containerID="38d4a99bab5f95a3b5960d15610683ae25f67ff0350f9ea36cae626455c2a0c9" exitCode=0 Sep 30 00:32:14 crc kubenswrapper[4809]: I0930 00:32:14.887506 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2bnnc" event={"ID":"1120e47a-9490-497a-a79b-aa2941e070f7","Type":"ContainerDied","Data":"38d4a99bab5f95a3b5960d15610683ae25f67ff0350f9ea36cae626455c2a0c9"} Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.399583 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.568547 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-scripts\") pod \"1120e47a-9490-497a-a79b-aa2941e070f7\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.568835 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfjpv\" (UniqueName: \"kubernetes.io/projected/1120e47a-9490-497a-a79b-aa2941e070f7-kube-api-access-tfjpv\") pod \"1120e47a-9490-497a-a79b-aa2941e070f7\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.568946 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-combined-ca-bundle\") pod \"1120e47a-9490-497a-a79b-aa2941e070f7\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.568994 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-config-data\") pod \"1120e47a-9490-497a-a79b-aa2941e070f7\" (UID: \"1120e47a-9490-497a-a79b-aa2941e070f7\") " Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.574833 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-scripts" (OuterVolumeSpecName: "scripts") pod "1120e47a-9490-497a-a79b-aa2941e070f7" (UID: "1120e47a-9490-497a-a79b-aa2941e070f7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.574860 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1120e47a-9490-497a-a79b-aa2941e070f7-kube-api-access-tfjpv" (OuterVolumeSpecName: "kube-api-access-tfjpv") pod "1120e47a-9490-497a-a79b-aa2941e070f7" (UID: "1120e47a-9490-497a-a79b-aa2941e070f7"). InnerVolumeSpecName "kube-api-access-tfjpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.607254 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1120e47a-9490-497a-a79b-aa2941e070f7" (UID: "1120e47a-9490-497a-a79b-aa2941e070f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.615049 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-config-data" (OuterVolumeSpecName: "config-data") pod "1120e47a-9490-497a-a79b-aa2941e070f7" (UID: "1120e47a-9490-497a-a79b-aa2941e070f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.671986 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.672024 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfjpv\" (UniqueName: \"kubernetes.io/projected/1120e47a-9490-497a-a79b-aa2941e070f7-kube-api-access-tfjpv\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.672040 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.672052 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1120e47a-9490-497a-a79b-aa2941e070f7-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.907757 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2bnnc" event={"ID":"1120e47a-9490-497a-a79b-aa2941e070f7","Type":"ContainerDied","Data":"00a56c1a7ad60c64534e9b666aa9180217f87b112a81730bb846869a75289cae"} Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.907801 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00a56c1a7ad60c64534e9b666aa9180217f87b112a81730bb846869a75289cae" Sep 30 00:32:16 crc kubenswrapper[4809]: I0930 00:32:16.907877 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-2bnnc" Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.014096 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.014306 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f7a3ddad-430a-4a9f-8f4c-56ac6431193e" containerName="kube-state-metrics" containerID="cri-o://011750b10155869cfe0f552c2ff1ebfca96a336a959b7e71ebd07d7ca05db0ee" gracePeriod=30 Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.113911 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.282126 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.282333 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mysqld-exporter-0" podUID="1097b4e8-b93a-4350-b74c-acf37be3a84f" containerName="mysqld-exporter" containerID="cri-o://1d34205b33dbd022c8276ab14c2238065ebe82342cc3812d0abee5fb4f987479" gracePeriod=30 Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.647490 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.650504 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.733831 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.922464 4809 generic.go:334] "Generic (PLEG): container finished" podID="1097b4e8-b93a-4350-b74c-acf37be3a84f" containerID="1d34205b33dbd022c8276ab14c2238065ebe82342cc3812d0abee5fb4f987479" exitCode=2 Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.922544 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"1097b4e8-b93a-4350-b74c-acf37be3a84f","Type":"ContainerDied","Data":"1d34205b33dbd022c8276ab14c2238065ebe82342cc3812d0abee5fb4f987479"} Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.929614 4809 generic.go:334] "Generic (PLEG): container finished" podID="f7a3ddad-430a-4a9f-8f4c-56ac6431193e" containerID="011750b10155869cfe0f552c2ff1ebfca96a336a959b7e71ebd07d7ca05db0ee" exitCode=2 Sep 30 00:32:17 crc kubenswrapper[4809]: I0930 00:32:17.929702 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f7a3ddad-430a-4a9f-8f4c-56ac6431193e","Type":"ContainerDied","Data":"011750b10155869cfe0f552c2ff1ebfca96a336a959b7e71ebd07d7ca05db0ee"} Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.075614 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.210170 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwlnm\" (UniqueName: \"kubernetes.io/projected/1097b4e8-b93a-4350-b74c-acf37be3a84f-kube-api-access-rwlnm\") pod \"1097b4e8-b93a-4350-b74c-acf37be3a84f\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.210301 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-config-data\") pod \"1097b4e8-b93a-4350-b74c-acf37be3a84f\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.210340 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-combined-ca-bundle\") pod \"1097b4e8-b93a-4350-b74c-acf37be3a84f\" (UID: \"1097b4e8-b93a-4350-b74c-acf37be3a84f\") " Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.215601 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1097b4e8-b93a-4350-b74c-acf37be3a84f-kube-api-access-rwlnm" (OuterVolumeSpecName: "kube-api-access-rwlnm") pod "1097b4e8-b93a-4350-b74c-acf37be3a84f" (UID: "1097b4e8-b93a-4350-b74c-acf37be3a84f"). InnerVolumeSpecName "kube-api-access-rwlnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.269003 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1097b4e8-b93a-4350-b74c-acf37be3a84f" (UID: "1097b4e8-b93a-4350-b74c-acf37be3a84f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.293319 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.296789 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-config-data" (OuterVolumeSpecName: "config-data") pod "1097b4e8-b93a-4350-b74c-acf37be3a84f" (UID: "1097b4e8-b93a-4350-b74c-acf37be3a84f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.313966 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwlnm\" (UniqueName: \"kubernetes.io/projected/1097b4e8-b93a-4350-b74c-acf37be3a84f-kube-api-access-rwlnm\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.314002 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.314012 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1097b4e8-b93a-4350-b74c-acf37be3a84f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.415606 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhm4w\" (UniqueName: \"kubernetes.io/projected/f7a3ddad-430a-4a9f-8f4c-56ac6431193e-kube-api-access-nhm4w\") pod \"f7a3ddad-430a-4a9f-8f4c-56ac6431193e\" (UID: \"f7a3ddad-430a-4a9f-8f4c-56ac6431193e\") " Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.421844 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a3ddad-430a-4a9f-8f4c-56ac6431193e-kube-api-access-nhm4w" (OuterVolumeSpecName: "kube-api-access-nhm4w") pod "f7a3ddad-430a-4a9f-8f4c-56ac6431193e" (UID: "f7a3ddad-430a-4a9f-8f4c-56ac6431193e"). InnerVolumeSpecName "kube-api-access-nhm4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.504704 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Sep 30 00:32:18 crc kubenswrapper[4809]: E0930 00:32:18.505151 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a3ddad-430a-4a9f-8f4c-56ac6431193e" containerName="kube-state-metrics" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.505172 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a3ddad-430a-4a9f-8f4c-56ac6431193e" containerName="kube-state-metrics" Sep 30 00:32:18 crc kubenswrapper[4809]: E0930 00:32:18.505193 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1120e47a-9490-497a-a79b-aa2941e070f7" containerName="aodh-db-sync" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.505202 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1120e47a-9490-497a-a79b-aa2941e070f7" containerName="aodh-db-sync" Sep 30 00:32:18 crc kubenswrapper[4809]: E0930 00:32:18.505248 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1097b4e8-b93a-4350-b74c-acf37be3a84f" containerName="mysqld-exporter" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.505258 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1097b4e8-b93a-4350-b74c-acf37be3a84f" containerName="mysqld-exporter" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.505447 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1097b4e8-b93a-4350-b74c-acf37be3a84f" containerName="mysqld-exporter" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.505474 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a3ddad-430a-4a9f-8f4c-56ac6431193e" containerName="kube-state-metrics" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.505486 4809 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="1120e47a-9490-497a-a79b-aa2941e070f7" containerName="aodh-db-sync" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.507483 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.514028 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.514254 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.517662 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhm4w\" (UniqueName: \"kubernetes.io/projected/f7a3ddad-430a-4a9f-8f4c-56ac6431193e-kube-api-access-nhm4w\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.518548 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-vzdp5" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.542420 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.619720 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-config-data\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.619825 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55pq9\" (UniqueName: \"kubernetes.io/projected/f19829b9-a07b-4348-b2c0-31330ecaac0c-kube-api-access-55pq9\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.619868 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.619910 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-scripts\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.660406 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.226:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.660434 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.226:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.722625 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-config-data\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.722735 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55pq9\" (UniqueName: \"kubernetes.io/projected/f19829b9-a07b-4348-b2c0-31330ecaac0c-kube-api-access-55pq9\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.722773 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.722808 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-scripts\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.731613 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-config-data\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.736137 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.740129 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55pq9\" (UniqueName: \"kubernetes.io/projected/f19829b9-a07b-4348-b2c0-31330ecaac0c-kube-api-access-55pq9\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.751985 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-scripts\") pod \"aodh-0\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.837881 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.963339 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f7a3ddad-430a-4a9f-8f4c-56ac6431193e","Type":"ContainerDied","Data":"1191142c5ff18757a5884dffefc4270e022207a272cebd0e5d68c63db9ae6de3"} Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.967295 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"1097b4e8-b93a-4350-b74c-acf37be3a84f","Type":"ContainerDied","Data":"b89c46e0a9cc32018b1c2a520287924a72e0e1d7b56d68b45ef88ebdaa74a2c3"} Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.964983 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.963343 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:32:18 crc kubenswrapper[4809]: I0930 00:32:18.967460 4809 scope.go:117] "RemoveContainer" containerID="011750b10155869cfe0f552c2ff1ebfca96a336a959b7e71ebd07d7ca05db0ee" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.006542 4809 scope.go:117] "RemoveContainer" containerID="1d34205b33dbd022c8276ab14c2238065ebe82342cc3812d0abee5fb4f987479" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.063979 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.093707 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.103695 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.139237 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.170282 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.172655 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.179844 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.180021 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.211696 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.217362 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.220346 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-config-data" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.220414 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-mysqld-exporter-svc" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.223302 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.248177 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249559 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mysqld-exporter-tls-certs\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-mysqld-exporter-tls-certs\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249598 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249667 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd8lh\" (UniqueName: \"kubernetes.io/projected/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-api-access-qd8lh\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249722 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249758 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249794 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.249825 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-config-data\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc 
kubenswrapper[4809]: I0930 00:32:19.249959 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w4dg\" (UniqueName: \"kubernetes.io/projected/5713929f-9da0-4005-8396-ff4a878a552c-kube-api-access-6w4dg\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.353114 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mysqld-exporter-tls-certs\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-mysqld-exporter-tls-certs\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.354095 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.354160 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd8lh\" (UniqueName: \"kubernetes.io/projected/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-api-access-qd8lh\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.354200 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.354236 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.354274 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.354311 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-config-data\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.355559 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w4dg\" (UniqueName: \"kubernetes.io/projected/5713929f-9da0-4005-8396-ff4a878a552c-kube-api-access-6w4dg\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.361760 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-config-data\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.364933 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.365924 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.368902 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.369463 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mysqld-exporter-tls-certs\" (UniqueName: \"kubernetes.io/secret/5713929f-9da0-4005-8396-ff4a878a552c-mysqld-exporter-tls-certs\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.372418 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.377117 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd8lh\" (UniqueName: \"kubernetes.io/projected/75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1-kube-api-access-qd8lh\") pod \"kube-state-metrics-0\" (UID: \"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1\") " pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.377800 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w4dg\" (UniqueName: \"kubernetes.io/projected/5713929f-9da0-4005-8396-ff4a878a552c-kube-api-access-6w4dg\") pod \"mysqld-exporter-0\" (UID: \"5713929f-9da0-4005-8396-ff4a878a552c\") " pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.390428 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.509996 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.554063 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-0" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.712233 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1097b4e8-b93a-4350-b74c-acf37be3a84f" path="/var/lib/kubelet/pods/1097b4e8-b93a-4350-b74c-acf37be3a84f/volumes" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.713073 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a3ddad-430a-4a9f-8f4c-56ac6431193e" path="/var/lib/kubelet/pods/f7a3ddad-430a-4a9f-8f4c-56ac6431193e/volumes" Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.962598 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.962849 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-central-agent" containerID="cri-o://3d51af67b94bee2044fa7c3079ea816b42f4cd4c3af3750d932aeeb6bf2eb63f" gracePeriod=30 Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.962955 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="proxy-httpd" containerID="cri-o://839cdaf9460d9f2a2a49eb8eb85fc444cecb6412ae509e32f197411a35d4e5d0" gracePeriod=30 Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.962990 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="sg-core" containerID="cri-o://90cb7f354da625769ff30571abba954bef8bbc57f04906ec8c34b453f33fa790" gracePeriod=30 Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.963018 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-notification-agent" containerID="cri-o://c427ea505545f20e87e6fbef1a08bd14b856926e89d54c5a639988436c04283d" gracePeriod=30 Sep 30 00:32:19 crc kubenswrapper[4809]: I0930 00:32:19.992244 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerStarted","Data":"ad4c9369e9b77febfdf3e912895257dbdf1d79ccf14a8719395e95ff93fa3c11"} Sep 30 00:32:20 crc kubenswrapper[4809]: I0930 00:32:20.370101 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:32:20 crc kubenswrapper[4809]: I0930 00:32:20.386038 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.045147 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"5713929f-9da0-4005-8396-ff4a878a552c","Type":"ContainerStarted","Data":"512d483ad28bdf040c5f6b7b0369a281f00838dc50ee5cd04c84846ff1a36fa3"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059558 4809 generic.go:334] "Generic (PLEG): container finished" podID="67594e0a-af59-4b84-8713-176fb13ca209" containerID="839cdaf9460d9f2a2a49eb8eb85fc444cecb6412ae509e32f197411a35d4e5d0" exitCode=0 Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059607 4809 generic.go:334] "Generic (PLEG): container finished" podID="67594e0a-af59-4b84-8713-176fb13ca209" containerID="90cb7f354da625769ff30571abba954bef8bbc57f04906ec8c34b453f33fa790" exitCode=2 Sep 30 00:32:21 crc 
kubenswrapper[4809]: I0930 00:32:21.059617 4809 generic.go:334] "Generic (PLEG): container finished" podID="67594e0a-af59-4b84-8713-176fb13ca209" containerID="c427ea505545f20e87e6fbef1a08bd14b856926e89d54c5a639988436c04283d" exitCode=0 Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059626 4809 generic.go:334] "Generic (PLEG): container finished" podID="67594e0a-af59-4b84-8713-176fb13ca209" containerID="3d51af67b94bee2044fa7c3079ea816b42f4cd4c3af3750d932aeeb6bf2eb63f" exitCode=0 Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059619 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerDied","Data":"839cdaf9460d9f2a2a49eb8eb85fc444cecb6412ae509e32f197411a35d4e5d0"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059699 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerDied","Data":"90cb7f354da625769ff30571abba954bef8bbc57f04906ec8c34b453f33fa790"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059714 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerDied","Data":"c427ea505545f20e87e6fbef1a08bd14b856926e89d54c5a639988436c04283d"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.059728 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerDied","Data":"3d51af67b94bee2044fa7c3079ea816b42f4cd4c3af3750d932aeeb6bf2eb63f"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.075467 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1","Type":"ContainerStarted","Data":"18462ce08f58bfdc63d20d1e0cabc64a2c48684b83f613bd26e29cba014c8935"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.075753 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.086248 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerStarted","Data":"4ff1ec423a7885991442e6326227e9c80532f8137b7e65867dbc487aa351d0c3"} Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.102689 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.689807229 podStartE2EDuration="2.102672186s" podCreationTimestamp="2025-09-30 00:32:19 +0000 UTC" firstStartedPulling="2025-09-30 00:32:20.341813276 +0000 UTC m=+1391.378062684" lastFinishedPulling="2025-09-30 00:32:20.754678243 +0000 UTC m=+1391.790927641" observedRunningTime="2025-09-30 00:32:21.095254773 +0000 UTC m=+1392.131504201" watchObservedRunningTime="2025-09-30 00:32:21.102672186 +0000 UTC m=+1392.138921584" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.487292 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.631858 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-sg-core-conf-yaml\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.631947 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-log-httpd\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.632121 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-run-httpd\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.632152 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-combined-ca-bundle\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.632233 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-scripts\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.632265 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbg92\" (UniqueName: \"kubernetes.io/projected/67594e0a-af59-4b84-8713-176fb13ca209-kube-api-access-rbg92\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.632306 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-config-data\") pod \"67594e0a-af59-4b84-8713-176fb13ca209\" (UID: \"67594e0a-af59-4b84-8713-176fb13ca209\") " Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.633781 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.634366 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.645465 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-scripts" (OuterVolumeSpecName: "scripts") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.645487 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67594e0a-af59-4b84-8713-176fb13ca209-kube-api-access-rbg92" (OuterVolumeSpecName: "kube-api-access-rbg92") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "kube-api-access-rbg92". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.672962 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.734455 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.734487 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.734495 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67594e0a-af59-4b84-8713-176fb13ca209-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.734504 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.734513 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbg92\" (UniqueName: \"kubernetes.io/projected/67594e0a-af59-4b84-8713-176fb13ca209-kube-api-access-rbg92\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.768956 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.820814 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-config-data" (OuterVolumeSpecName: "config-data") pod "67594e0a-af59-4b84-8713-176fb13ca209" (UID: "67594e0a-af59-4b84-8713-176fb13ca209"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.835893 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.835927 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67594e0a-af59-4b84-8713-176fb13ca209-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:21 crc kubenswrapper[4809]: I0930 00:32:21.982012 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.099180 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"5713929f-9da0-4005-8396-ff4a878a552c","Type":"ContainerStarted","Data":"128ccfbca97244fafe5a1ccd679cff342889599d8496063abdce270dc9cce8ac"} Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.102118 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67594e0a-af59-4b84-8713-176fb13ca209","Type":"ContainerDied","Data":"114ca9dbc79651a995a5c3fc03853e8e9a741c9d3fbba9289b77d4e2c171ef19"} Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.102160 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.102192 4809 scope.go:117] "RemoveContainer" containerID="839cdaf9460d9f2a2a49eb8eb85fc444cecb6412ae509e32f197411a35d4e5d0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.104271 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1","Type":"ContainerStarted","Data":"8e684540363ea923c3ae1daf7e3622b6fb84fd87cfd40e8504dc4daafceab0b8"} Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.131671 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-0" podStartSLOduration=2.00331398 podStartE2EDuration="3.131654625s" podCreationTimestamp="2025-09-30 00:32:19 +0000 UTC" firstStartedPulling="2025-09-30 00:32:20.421056123 +0000 UTC m=+1391.457305531" lastFinishedPulling="2025-09-30 00:32:21.549396768 +0000 UTC m=+1392.585646176" observedRunningTime="2025-09-30 00:32:22.124395757 +0000 UTC m=+1393.160645165" watchObservedRunningTime="2025-09-30 00:32:22.131654625 +0000 UTC m=+1393.167904033" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.169107 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.180595 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.197289 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:22 crc kubenswrapper[4809]: E0930 00:32:22.197771 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-central-agent" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.197788 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-central-agent" Sep 30 00:32:22 crc kubenswrapper[4809]: E0930 00:32:22.197802 4809 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="proxy-httpd" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.197809 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="proxy-httpd" Sep 30 00:32:22 crc kubenswrapper[4809]: E0930 00:32:22.197823 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="sg-core" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.197829 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="sg-core" Sep 30 00:32:22 crc kubenswrapper[4809]: E0930 00:32:22.197840 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-notification-agent" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.197847 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-notification-agent" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.198029 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="sg-core" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.198045 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="proxy-httpd" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.198059 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-notification-agent" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.198071 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="67594e0a-af59-4b84-8713-176fb13ca209" containerName="ceilometer-central-agent" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.200531 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.209233 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.209416 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.209585 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.226393 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345065 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5zsm\" (UniqueName: \"kubernetes.io/projected/9fcc0027-9738-4656-a707-2d814a18e23d-kube-api-access-v5zsm\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345118 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-run-httpd\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345221 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-config-data\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345276 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-scripts\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345398 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-log-httpd\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345422 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.345456 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.346577 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.364810 4809 scope.go:117] "RemoveContainer" containerID="90cb7f354da625769ff30571abba954bef8bbc57f04906ec8c34b453f33fa790" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.405994 4809 scope.go:117] "RemoveContainer" containerID="c427ea505545f20e87e6fbef1a08bd14b856926e89d54c5a639988436c04283d" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450375 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450501 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5zsm\" (UniqueName: \"kubernetes.io/projected/9fcc0027-9738-4656-a707-2d814a18e23d-kube-api-access-v5zsm\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450551 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-run-httpd\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450621 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-config-data\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450699 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-scripts\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450811 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-log-httpd\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.450832 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.452679 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-run-httpd\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.452952 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-log-httpd\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.456834 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-config-data\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.457005 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.457515 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-scripts\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.458075 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.470865 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.472258 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5zsm\" (UniqueName: \"kubernetes.io/projected/9fcc0027-9738-4656-a707-2d814a18e23d-kube-api-access-v5zsm\") pod \"ceilometer-0\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.477959 4809 scope.go:117] "RemoveContainer" containerID="3d51af67b94bee2044fa7c3079ea816b42f4cd4c3af3750d932aeeb6bf2eb63f" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.543620 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.578553 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.638947 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.638986 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.724348 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 00:32:22 crc kubenswrapper[4809]: I0930 00:32:22.786773 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.082054 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.120845 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerStarted","Data":"6cfd04d8958a676558fa805ab65cc36d6110ad38d065ef78d343466b6179951a"} Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.126872 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerStarted","Data":"ac7ac25470feaa0e1e1e00583e5836d327c315739ca03472da8083cd460c1391"} Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.337557 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.705001 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67594e0a-af59-4b84-8713-176fb13ca209" path="/var/lib/kubelet/pods/67594e0a-af59-4b84-8713-176fb13ca209/volumes" Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.724809 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.227:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:32:23 crc kubenswrapper[4809]: I0930 00:32:23.724897 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.227:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:32:25 crc kubenswrapper[4809]: I0930 00:32:25.153354 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerStarted","Data":"e61d16988d18bd685965f6004764b116c5e833f818ab6c0f28eab6103134cf71"} Sep 30 00:32:26 crc kubenswrapper[4809]: I0930 00:32:26.166239 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerStarted","Data":"a92d489b2aa6996a01bae939e8589815662a9af03f128329371cc63dc9942d1d"} Sep 30 00:32:27 crc kubenswrapper[4809]: I0930 00:32:27.655672 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:32:27 crc 
kubenswrapper[4809]: I0930 00:32:27.656171 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:32:27 crc kubenswrapper[4809]: I0930 00:32:27.661781 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:32:27 crc kubenswrapper[4809]: I0930 00:32:27.662300 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:32:28 crc kubenswrapper[4809]: I0930 00:32:28.200748 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-api" containerID="cri-o://4ff1ec423a7885991442e6326227e9c80532f8137b7e65867dbc487aa351d0c3" gracePeriod=30 Sep 30 00:32:28 crc kubenswrapper[4809]: I0930 00:32:28.201289 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerStarted","Data":"c61ce033cb68d15345f4bc5a8a25756339bc675d1859cc455c5f9c06877ff875"} Sep 30 00:32:28 crc kubenswrapper[4809]: I0930 00:32:28.201624 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-listener" containerID="cri-o://c61ce033cb68d15345f4bc5a8a25756339bc675d1859cc455c5f9c06877ff875" gracePeriod=30 Sep 30 00:32:28 crc kubenswrapper[4809]: I0930 00:32:28.201651 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-evaluator" containerID="cri-o://6cfd04d8958a676558fa805ab65cc36d6110ad38d065ef78d343466b6179951a" gracePeriod=30 Sep 30 00:32:28 crc kubenswrapper[4809]: I0930 00:32:28.201703 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-notifier" containerID="cri-o://a92d489b2aa6996a01bae939e8589815662a9af03f128329371cc63dc9942d1d" gracePeriod=30 Sep 30 00:32:28 crc kubenswrapper[4809]: I0930 00:32:28.239109 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.050461112 podStartE2EDuration="10.239089933s" podCreationTimestamp="2025-09-30 00:32:18 +0000 UTC" firstStartedPulling="2025-09-30 00:32:19.379798998 +0000 UTC m=+1390.416048406" lastFinishedPulling="2025-09-30 00:32:27.568427819 +0000 UTC m=+1398.604677227" observedRunningTime="2025-09-30 00:32:28.230074606 +0000 UTC m=+1399.266324014" watchObservedRunningTime="2025-09-30 00:32:28.239089933 +0000 UTC m=+1399.275339341" Sep 30 00:32:29 crc kubenswrapper[4809]: I0930 00:32:29.214724 4809 generic.go:334] "Generic (PLEG): container finished" podID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerID="6cfd04d8958a676558fa805ab65cc36d6110ad38d065ef78d343466b6179951a" exitCode=0 Sep 30 00:32:29 crc kubenswrapper[4809]: I0930 00:32:29.214756 4809 generic.go:334] "Generic (PLEG): container finished" podID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerID="4ff1ec423a7885991442e6326227e9c80532f8137b7e65867dbc487aa351d0c3" exitCode=0 Sep 30 00:32:29 crc kubenswrapper[4809]: I0930 00:32:29.215283 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerDied","Data":"6cfd04d8958a676558fa805ab65cc36d6110ad38d065ef78d343466b6179951a"} Sep 30 00:32:29 
crc kubenswrapper[4809]: I0930 00:32:29.215328 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerDied","Data":"4ff1ec423a7885991442e6326227e9c80532f8137b7e65867dbc487aa351d0c3"} Sep 30 00:32:29 crc kubenswrapper[4809]: I0930 00:32:29.526885 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 00:32:29 crc kubenswrapper[4809]: I0930 00:32:29.968607 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.048862 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-combined-ca-bundle\") pod \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.048959 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-config-data\") pod \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.049009 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lhzw\" (UniqueName: \"kubernetes.io/projected/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-kube-api-access-5lhzw\") pod \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\" (UID: \"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b\") " Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.059809 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-kube-api-access-5lhzw" (OuterVolumeSpecName: "kube-api-access-5lhzw") pod "20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" (UID: "20ef09e9-09d7-4dd4-9987-4f0b7cbc292b"). InnerVolumeSpecName "kube-api-access-5lhzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.085157 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-config-data" (OuterVolumeSpecName: "config-data") pod "20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" (UID: "20ef09e9-09d7-4dd4-9987-4f0b7cbc292b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.086580 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" (UID: "20ef09e9-09d7-4dd4-9987-4f0b7cbc292b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.151099 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.151140 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lhzw\" (UniqueName: \"kubernetes.io/projected/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-kube-api-access-5lhzw\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.151155 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.225703 4809 generic.go:334] "Generic (PLEG): container finished" podID="20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" containerID="2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268" exitCode=137 Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.225758 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.225776 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b","Type":"ContainerDied","Data":"2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268"} Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.225806 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"20ef09e9-09d7-4dd4-9987-4f0b7cbc292b","Type":"ContainerDied","Data":"903b36ce046a467d1a45673ffed5f0a2221afcd000dab36edaae23233233c358"} Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.225825 4809 scope.go:117] "RemoveContainer" containerID="2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.233108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerStarted","Data":"c11639b1a9f462a6b647492b60875254f9e379108eff41fa59ee95d671b1c259"} Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.270656 4809 scope.go:117] "RemoveContainer" containerID="2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268" Sep 30 00:32:30 crc kubenswrapper[4809]: E0930 00:32:30.271205 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268\": container with ID starting with 2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268 not found: ID does not exist" containerID="2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.271244 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268"} err="failed to get container status \"2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268\": rpc error: code = NotFound desc = could not find container \"2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268\": container with ID starting with 
2305ae0fe0c64849828e6419855d64df5e0db1a9bfab6462590979798309e268 not found: ID does not exist" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.303701 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.338697 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.364703 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:32:30 crc kubenswrapper[4809]: E0930 00:32:30.365225 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.365243 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.365504 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.366410 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.372114 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.372225 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.372251 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.431705 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.458994 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.459054 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmtlc\" (UniqueName: \"kubernetes.io/projected/90e94897-7fd7-4d70-b20d-1d3f429f3522-kube-api-access-wmtlc\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.459110 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.459176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.459263 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.560776 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmtlc\" (UniqueName: \"kubernetes.io/projected/90e94897-7fd7-4d70-b20d-1d3f429f3522-kube-api-access-wmtlc\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.560872 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.560942 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.561029 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.561167 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.581378 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.582007 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.582856 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.583011 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/90e94897-7fd7-4d70-b20d-1d3f429f3522-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.588274 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmtlc\" (UniqueName: \"kubernetes.io/projected/90e94897-7fd7-4d70-b20d-1d3f429f3522-kube-api-access-wmtlc\") pod \"nova-cell1-novncproxy-0\" (UID: \"90e94897-7fd7-4d70-b20d-1d3f429f3522\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:30 crc kubenswrapper[4809]: I0930 00:32:30.692600 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:31 crc kubenswrapper[4809]: W0930 00:32:31.185038 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90e94897_7fd7_4d70_b20d_1d3f429f3522.slice/crio-73b01abe9f7e0c96f14e18fb7589343c76f9f97656082822f716fabbcfbf13df WatchSource:0}: Error finding container 73b01abe9f7e0c96f14e18fb7589343c76f9f97656082822f716fabbcfbf13df: Status 404 returned error can't find the container with id 73b01abe9f7e0c96f14e18fb7589343c76f9f97656082822f716fabbcfbf13df Sep 30 00:32:31 crc kubenswrapper[4809]: I0930 00:32:31.187381 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:32:31 crc kubenswrapper[4809]: I0930 00:32:31.245088 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"90e94897-7fd7-4d70-b20d-1d3f429f3522","Type":"ContainerStarted","Data":"73b01abe9f7e0c96f14e18fb7589343c76f9f97656082822f716fabbcfbf13df"} Sep 30 00:32:31 crc kubenswrapper[4809]: I0930 00:32:31.252385 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerStarted","Data":"c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f"} Sep 30 00:32:31 crc kubenswrapper[4809]: I0930 00:32:31.703565 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20ef09e9-09d7-4dd4-9987-4f0b7cbc292b" path="/var/lib/kubelet/pods/20ef09e9-09d7-4dd4-9987-4f0b7cbc292b/volumes" Sep 30 00:32:32 crc kubenswrapper[4809]: I0930 00:32:32.266259 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"90e94897-7fd7-4d70-b20d-1d3f429f3522","Type":"ContainerStarted","Data":"da0d4f5471d9059f48a7d98cbae10d08e534c2245a2398949ffe500d7de65bb0"} Sep 30 00:32:32 crc kubenswrapper[4809]: I0930 00:32:32.311132 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.311115209 podStartE2EDuration="2.311115209s" podCreationTimestamp="2025-09-30 00:32:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:32.302757601 +0000 UTC m=+1403.339007019" watchObservedRunningTime="2025-09-30 00:32:32.311115209 +0000 UTC m=+1403.347364617" Sep 30 00:32:32 crc kubenswrapper[4809]: I0930 00:32:32.643235 
4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:32:32 crc kubenswrapper[4809]: I0930 00:32:32.643631 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:32:32 crc kubenswrapper[4809]: I0930 00:32:32.645949 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:32:32 crc kubenswrapper[4809]: I0930 00:32:32.646428 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.276935 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerStarted","Data":"8b6da36f49923614bab5cf1a1bfe82a6ccff75b083d3e1467f696ce555e4a9f5"} Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.277393 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-central-agent" containerID="cri-o://e61d16988d18bd685965f6004764b116c5e833f818ab6c0f28eab6103134cf71" gracePeriod=30 Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.277479 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.277859 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="proxy-httpd" containerID="cri-o://8b6da36f49923614bab5cf1a1bfe82a6ccff75b083d3e1467f696ce555e4a9f5" gracePeriod=30 Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.277910 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="sg-core" containerID="cri-o://c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f" gracePeriod=30 Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.277953 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-notification-agent" containerID="cri-o://c11639b1a9f462a6b647492b60875254f9e379108eff41fa59ee95d671b1c259" gracePeriod=30 Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.278397 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.289670 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.312782 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.13317635 podStartE2EDuration="11.312761071s" podCreationTimestamp="2025-09-30 00:32:22 +0000 UTC" firstStartedPulling="2025-09-30 00:32:23.071836147 +0000 UTC m=+1394.108085555" lastFinishedPulling="2025-09-30 00:32:32.251420868 +0000 UTC m=+1403.287670276" observedRunningTime="2025-09-30 00:32:33.303610361 +0000 UTC m=+1404.339859769" watchObservedRunningTime="2025-09-30 00:32:33.312761071 +0000 UTC m=+1404.349010479" Sep 30 00:32:33 crc kubenswrapper[4809]: E0930 00:32:33.462191 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fcc0027_9738_4656_a707_2d814a18e23d.slice/crio-conmon-c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fcc0027_9738_4656_a707_2d814a18e23d.slice/crio-c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f.scope\": RecentStats: unable to find data in memory cache]" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.600554 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79b5d74c8c-r2648"] Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.610421 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.627873 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79b5d74c8c-r2648"] Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.753595 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-svc\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.753769 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7fvg\" (UniqueName: \"kubernetes.io/projected/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-kube-api-access-v7fvg\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.753796 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-config\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.753830 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.753915 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-nb\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.753957 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-swift-storage-0\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.855831 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-v7fvg\" (UniqueName: \"kubernetes.io/projected/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-kube-api-access-v7fvg\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.855896 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-config\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.855964 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.856087 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-nb\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.856147 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-swift-storage-0\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.856182 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-svc\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.858343 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-config\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.859431 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-nb\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.860365 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.860840 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-swift-storage-0\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.861481 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-svc\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.890539 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7fvg\" (UniqueName: \"kubernetes.io/projected/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-kube-api-access-v7fvg\") pod \"dnsmasq-dns-79b5d74c8c-r2648\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:33 crc kubenswrapper[4809]: I0930 00:32:33.938622 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.295988 4809 generic.go:334] "Generic (PLEG): container finished" podID="9fcc0027-9738-4656-a707-2d814a18e23d" containerID="8b6da36f49923614bab5cf1a1bfe82a6ccff75b083d3e1467f696ce555e4a9f5" exitCode=0 Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296328 4809 generic.go:334] "Generic (PLEG): container finished" podID="9fcc0027-9738-4656-a707-2d814a18e23d" containerID="c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f" exitCode=2 Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296339 4809 generic.go:334] "Generic (PLEG): container finished" podID="9fcc0027-9738-4656-a707-2d814a18e23d" containerID="c11639b1a9f462a6b647492b60875254f9e379108eff41fa59ee95d671b1c259" exitCode=0 Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296349 4809 generic.go:334] "Generic (PLEG): container finished" podID="9fcc0027-9738-4656-a707-2d814a18e23d" containerID="e61d16988d18bd685965f6004764b116c5e833f818ab6c0f28eab6103134cf71" exitCode=0 Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296073 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerDied","Data":"8b6da36f49923614bab5cf1a1bfe82a6ccff75b083d3e1467f696ce555e4a9f5"} Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296807 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerDied","Data":"c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f"} Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296832 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerDied","Data":"c11639b1a9f462a6b647492b60875254f9e379108eff41fa59ee95d671b1c259"} Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.296844 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerDied","Data":"e61d16988d18bd685965f6004764b116c5e833f818ab6c0f28eab6103134cf71"} Sep 30 00:32:34 crc kubenswrapper[4809]: I0930 00:32:34.479466 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79b5d74c8c-r2648"] Sep 30 00:32:34 
crc kubenswrapper[4809]: I0930 00:32:34.945992 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.078936 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-config-data\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079421 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-log-httpd\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079489 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-ceilometer-tls-certs\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079588 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5zsm\" (UniqueName: \"kubernetes.io/projected/9fcc0027-9738-4656-a707-2d814a18e23d-kube-api-access-v5zsm\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079626 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-combined-ca-bundle\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079690 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-sg-core-conf-yaml\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079735 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-run-httpd\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.079783 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-scripts\") pod \"9fcc0027-9738-4656-a707-2d814a18e23d\" (UID: \"9fcc0027-9738-4656-a707-2d814a18e23d\") " Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.081216 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.081453 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.086545 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fcc0027-9738-4656-a707-2d814a18e23d-kube-api-access-v5zsm" (OuterVolumeSpecName: "kube-api-access-v5zsm") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "kube-api-access-v5zsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.088759 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-scripts" (OuterVolumeSpecName: "scripts") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.142996 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.184106 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5zsm\" (UniqueName: \"kubernetes.io/projected/9fcc0027-9738-4656-a707-2d814a18e23d-kube-api-access-v5zsm\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.184146 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.184161 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.184172 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.184182 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9fcc0027-9738-4656-a707-2d814a18e23d-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.189000 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.211766 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.250665 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-config-data" (OuterVolumeSpecName: "config-data") pod "9fcc0027-9738-4656-a707-2d814a18e23d" (UID: "9fcc0027-9738-4656-a707-2d814a18e23d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.285811 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.285845 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.285855 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9fcc0027-9738-4656-a707-2d814a18e23d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.307017 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.307027 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9fcc0027-9738-4656-a707-2d814a18e23d","Type":"ContainerDied","Data":"ac7ac25470feaa0e1e1e00583e5836d327c315739ca03472da8083cd460c1391"} Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.307087 4809 scope.go:117] "RemoveContainer" containerID="8b6da36f49923614bab5cf1a1bfe82a6ccff75b083d3e1467f696ce555e4a9f5" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.315757 4809 generic.go:334] "Generic (PLEG): container finished" podID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerID="4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb" exitCode=0 Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.315858 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" event={"ID":"9709db2d-cb05-4c93-bf1f-ab235ff99e4b","Type":"ContainerDied","Data":"4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb"} Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.315916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" event={"ID":"9709db2d-cb05-4c93-bf1f-ab235ff99e4b","Type":"ContainerStarted","Data":"da3b0178874f87570e157a36f4a5dac102578fb6611f49fd0a55ce4fdd187c22"} Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.345782 4809 scope.go:117] "RemoveContainer" containerID="c1b53178d24acc15e8d3cd9544fad80893439bd4657dd14b96094e005142514f" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.365873 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.372391 4809 scope.go:117] "RemoveContainer" containerID="c11639b1a9f462a6b647492b60875254f9e379108eff41fa59ee95d671b1c259" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.386310 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.404284 4809 scope.go:117] "RemoveContainer" containerID="e61d16988d18bd685965f6004764b116c5e833f818ab6c0f28eab6103134cf71" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.415008 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:35 crc kubenswrapper[4809]: E0930 00:32:35.415803 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-central-agent" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.415824 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-central-agent" Sep 30 00:32:35 crc kubenswrapper[4809]: E0930 00:32:35.415857 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="sg-core" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.415865 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="sg-core" Sep 30 00:32:35 crc kubenswrapper[4809]: E0930 00:32:35.415882 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="proxy-httpd" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.415890 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" 
containerName="proxy-httpd" Sep 30 00:32:35 crc kubenswrapper[4809]: E0930 00:32:35.415911 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-notification-agent" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.415918 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-notification-agent" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.416154 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="proxy-httpd" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.416191 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="sg-core" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.416213 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-central-agent" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.416230 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" containerName="ceilometer-notification-agent" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.419116 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.427880 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.428108 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.430094 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.433853 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491068 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-run-httpd\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491102 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-scripts\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491135 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-log-httpd\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491169 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqpg9\" (UniqueName: \"kubernetes.io/projected/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-kube-api-access-cqpg9\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " 
pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491185 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491205 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-config-data\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491221 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.491287 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593268 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqpg9\" (UniqueName: \"kubernetes.io/projected/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-kube-api-access-cqpg9\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593326 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593366 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-config-data\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593391 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593502 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593671 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-run-httpd\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593708 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-scripts\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.593744 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-log-httpd\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.594370 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-log-httpd\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.594962 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-run-httpd\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.601997 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.603551 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-config-data\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.603827 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-scripts\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.608145 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.613239 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.616844 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqpg9\" (UniqueName: \"kubernetes.io/projected/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-kube-api-access-cqpg9\") pod 
\"ceilometer-0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " pod="openstack/ceilometer-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.704284 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fcc0027-9738-4656-a707-2d814a18e23d" path="/var/lib/kubelet/pods/9fcc0027-9738-4656-a707-2d814a18e23d/volumes" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.705260 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:35 crc kubenswrapper[4809]: I0930 00:32:35.810437 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.327883 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" event={"ID":"9709db2d-cb05-4c93-bf1f-ab235ff99e4b","Type":"ContainerStarted","Data":"561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629"} Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.328768 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.385363 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" podStartSLOduration=3.385346405 podStartE2EDuration="3.385346405s" podCreationTimestamp="2025-09-30 00:32:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:36.37997533 +0000 UTC m=+1407.416224738" watchObservedRunningTime="2025-09-30 00:32:36.385346405 +0000 UTC m=+1407.421595813" Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.496144 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.660051 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.889484 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.889917 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-log" containerID="cri-o://2480510bd0ba09b7f7e5f6b92445258bef1db33916f7de04d87e6422ce439a4f" gracePeriod=30 Sep 30 00:32:36 crc kubenswrapper[4809]: I0930 00:32:36.889995 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-api" containerID="cri-o://9839bbad7adaf281459d5fcb34dc9bca4f9c3d9850cc206c91a548b2affcf14c" gracePeriod=30 Sep 30 00:32:37 crc kubenswrapper[4809]: I0930 00:32:37.349347 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerStarted","Data":"c88b5d5563316f34217a05b0d46b19d872278ce7620ba1b25ec3c2ccc9d12c09"} Sep 30 00:32:37 crc kubenswrapper[4809]: I0930 00:32:37.350689 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerStarted","Data":"232e0d6b53d1e60977719a252f7d60ecc104e09f747bd4cce8e94ad6b07039ee"} Sep 30 00:32:37 crc kubenswrapper[4809]: I0930 00:32:37.352487 4809 generic.go:334] 
"Generic (PLEG): container finished" podID="7c10753d-9437-47e0-b504-a2fede89973d" containerID="2480510bd0ba09b7f7e5f6b92445258bef1db33916f7de04d87e6422ce439a4f" exitCode=143 Sep 30 00:32:37 crc kubenswrapper[4809]: I0930 00:32:37.352576 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c10753d-9437-47e0-b504-a2fede89973d","Type":"ContainerDied","Data":"2480510bd0ba09b7f7e5f6b92445258bef1db33916f7de04d87e6422ce439a4f"} Sep 30 00:32:38 crc kubenswrapper[4809]: I0930 00:32:38.364549 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerStarted","Data":"b17c080957c0bcc39ee7d26f62bda9d4cdfed24c40e76e01d5d6eeff48e12ea9"} Sep 30 00:32:39 crc kubenswrapper[4809]: I0930 00:32:39.377757 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerStarted","Data":"9e12416376c669d0d0659aee2fbfe3c94043afbfc8ac0118e4968b8fc76a607d"} Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.396603 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerStarted","Data":"5668aa20357847587b845fc41f059cc70133a28d31f8bb21915a65b5c7213dc2"} Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.396775 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-central-agent" containerID="cri-o://c88b5d5563316f34217a05b0d46b19d872278ce7620ba1b25ec3c2ccc9d12c09" gracePeriod=30 Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.397021 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.397202 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="sg-core" containerID="cri-o://9e12416376c669d0d0659aee2fbfe3c94043afbfc8ac0118e4968b8fc76a607d" gracePeriod=30 Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.397305 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="proxy-httpd" containerID="cri-o://5668aa20357847587b845fc41f059cc70133a28d31f8bb21915a65b5c7213dc2" gracePeriod=30 Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.397308 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-notification-agent" containerID="cri-o://b17c080957c0bcc39ee7d26f62bda9d4cdfed24c40e76e01d5d6eeff48e12ea9" gracePeriod=30 Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.407840 4809 generic.go:334] "Generic (PLEG): container finished" podID="7c10753d-9437-47e0-b504-a2fede89973d" containerID="9839bbad7adaf281459d5fcb34dc9bca4f9c3d9850cc206c91a548b2affcf14c" exitCode=0 Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.407899 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c10753d-9437-47e0-b504-a2fede89973d","Type":"ContainerDied","Data":"9839bbad7adaf281459d5fcb34dc9bca4f9c3d9850cc206c91a548b2affcf14c"} Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.435862 4809 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.020763915 podStartE2EDuration="5.435836913s" podCreationTimestamp="2025-09-30 00:32:35 +0000 UTC" firstStartedPulling="2025-09-30 00:32:36.473856605 +0000 UTC m=+1407.510106013" lastFinishedPulling="2025-09-30 00:32:39.888929603 +0000 UTC m=+1410.925179011" observedRunningTime="2025-09-30 00:32:40.429669095 +0000 UTC m=+1411.465918493" watchObservedRunningTime="2025-09-30 00:32:40.435836913 +0000 UTC m=+1411.472086321" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.582507 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.694970 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.723403 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.729723 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-combined-ca-bundle\") pod \"7c10753d-9437-47e0-b504-a2fede89973d\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.729821 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c10753d-9437-47e0-b504-a2fede89973d-logs\") pod \"7c10753d-9437-47e0-b504-a2fede89973d\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.729866 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-config-data\") pod \"7c10753d-9437-47e0-b504-a2fede89973d\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.729918 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7skv\" (UniqueName: \"kubernetes.io/projected/7c10753d-9437-47e0-b504-a2fede89973d-kube-api-access-s7skv\") pod \"7c10753d-9437-47e0-b504-a2fede89973d\" (UID: \"7c10753d-9437-47e0-b504-a2fede89973d\") " Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.730408 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c10753d-9437-47e0-b504-a2fede89973d-logs" (OuterVolumeSpecName: "logs") pod "7c10753d-9437-47e0-b504-a2fede89973d" (UID: "7c10753d-9437-47e0-b504-a2fede89973d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.730535 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c10753d-9437-47e0-b504-a2fede89973d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.740848 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c10753d-9437-47e0-b504-a2fede89973d-kube-api-access-s7skv" (OuterVolumeSpecName: "kube-api-access-s7skv") pod "7c10753d-9437-47e0-b504-a2fede89973d" (UID: "7c10753d-9437-47e0-b504-a2fede89973d"). InnerVolumeSpecName "kube-api-access-s7skv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.763155 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c10753d-9437-47e0-b504-a2fede89973d" (UID: "7c10753d-9437-47e0-b504-a2fede89973d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.774851 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-config-data" (OuterVolumeSpecName: "config-data") pod "7c10753d-9437-47e0-b504-a2fede89973d" (UID: "7c10753d-9437-47e0-b504-a2fede89973d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.833252 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.833291 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c10753d-9437-47e0-b504-a2fede89973d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:40 crc kubenswrapper[4809]: I0930 00:32:40.833306 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7skv\" (UniqueName: \"kubernetes.io/projected/7c10753d-9437-47e0-b504-a2fede89973d-kube-api-access-s7skv\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.421250 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerID="5668aa20357847587b845fc41f059cc70133a28d31f8bb21915a65b5c7213dc2" exitCode=0 Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.421583 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerID="9e12416376c669d0d0659aee2fbfe3c94043afbfc8ac0118e4968b8fc76a607d" exitCode=2 Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.421591 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerID="b17c080957c0bcc39ee7d26f62bda9d4cdfed24c40e76e01d5d6eeff48e12ea9" exitCode=0 Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.421292 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerDied","Data":"5668aa20357847587b845fc41f059cc70133a28d31f8bb21915a65b5c7213dc2"} Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.421693 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerDied","Data":"9e12416376c669d0d0659aee2fbfe3c94043afbfc8ac0118e4968b8fc76a607d"} Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.421729 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerDied","Data":"b17c080957c0bcc39ee7d26f62bda9d4cdfed24c40e76e01d5d6eeff48e12ea9"} Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.424537 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.424529 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c10753d-9437-47e0-b504-a2fede89973d","Type":"ContainerDied","Data":"db705989ef545346ea5c6bf5a4dbf049f7c2df6e8e540085ece2ab862947c655"} Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.424593 4809 scope.go:117] "RemoveContainer" containerID="9839bbad7adaf281459d5fcb34dc9bca4f9c3d9850cc206c91a548b2affcf14c" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.445366 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.460560 4809 scope.go:117] "RemoveContainer" containerID="2480510bd0ba09b7f7e5f6b92445258bef1db33916f7de04d87e6422ce439a4f" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.510234 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.526383 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.551989 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:41 crc kubenswrapper[4809]: E0930 00:32:41.552592 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-api" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.552630 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-api" Sep 30 00:32:41 crc kubenswrapper[4809]: E0930 00:32:41.552674 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-log" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.552683 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-log" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.552954 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-log" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.552981 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c10753d-9437-47e0-b504-a2fede89973d" containerName="nova-api-api" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.554470 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.559677 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.560148 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.560326 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.562888 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.643329 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-c2txl"] Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.645180 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.647789 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8twk4\" (UniqueName: \"kubernetes.io/projected/e9293a09-0853-43ee-bf42-6e634b4ce617-kube-api-access-8twk4\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.647865 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9293a09-0853-43ee-bf42-6e634b4ce617-logs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.647893 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.647930 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.647965 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-config-data\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.648017 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-public-tls-certs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.651725 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.651818 4809 reflector.go:368] Caches populated for 
*v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.652401 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-c2txl"] Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.702423 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c10753d-9437-47e0-b504-a2fede89973d" path="/var/lib/kubelet/pods/7c10753d-9437-47e0-b504-a2fede89973d/volumes" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749638 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkxr7\" (UniqueName: \"kubernetes.io/projected/009b9521-367b-4991-910e-b3ede9622095-kube-api-access-xkxr7\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749695 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9293a09-0853-43ee-bf42-6e634b4ce617-logs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749717 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749746 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749773 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-config-data\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749797 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-scripts\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.749814 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-config-data\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.750035 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-public-tls-certs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.750125 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9293a09-0853-43ee-bf42-6e634b4ce617-logs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.750452 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.750578 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8twk4\" (UniqueName: \"kubernetes.io/projected/e9293a09-0853-43ee-bf42-6e634b4ce617-kube-api-access-8twk4\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.755523 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.756609 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-public-tls-certs\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.756613 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.762919 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-config-data\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.765519 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8twk4\" (UniqueName: \"kubernetes.io/projected/e9293a09-0853-43ee-bf42-6e634b4ce617-kube-api-access-8twk4\") pod \"nova-api-0\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.852574 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.852862 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkxr7\" (UniqueName: \"kubernetes.io/projected/009b9521-367b-4991-910e-b3ede9622095-kube-api-access-xkxr7\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" 
Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.852926 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-config-data\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.852945 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-scripts\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.856307 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-config-data\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.856515 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-scripts\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.856564 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.870450 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkxr7\" (UniqueName: \"kubernetes.io/projected/009b9521-367b-4991-910e-b3ede9622095-kube-api-access-xkxr7\") pod \"nova-cell1-cell-mapping-c2txl\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.880235 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:41 crc kubenswrapper[4809]: I0930 00:32:41.962621 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:42 crc kubenswrapper[4809]: I0930 00:32:42.404117 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:42 crc kubenswrapper[4809]: W0930 00:32:42.409949 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9293a09_0853_43ee_bf42_6e634b4ce617.slice/crio-14906ce02fd08931400870f7f5ee96a03da9f8103c0c42ca0a3b30e0c0b2e226 WatchSource:0}: Error finding container 14906ce02fd08931400870f7f5ee96a03da9f8103c0c42ca0a3b30e0c0b2e226: Status 404 returned error can't find the container with id 14906ce02fd08931400870f7f5ee96a03da9f8103c0c42ca0a3b30e0c0b2e226 Sep 30 00:32:42 crc kubenswrapper[4809]: I0930 00:32:42.463120 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9293a09-0853-43ee-bf42-6e634b4ce617","Type":"ContainerStarted","Data":"14906ce02fd08931400870f7f5ee96a03da9f8103c0c42ca0a3b30e0c0b2e226"} Sep 30 00:32:42 crc kubenswrapper[4809]: I0930 00:32:42.625319 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-c2txl"] Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.473872 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-c2txl" event={"ID":"009b9521-367b-4991-910e-b3ede9622095","Type":"ContainerStarted","Data":"bf4b137ba6dc126192b8984b62420580aaadd43805dc3cd4f86aba951d598406"} Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.473909 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-c2txl" event={"ID":"009b9521-367b-4991-910e-b3ede9622095","Type":"ContainerStarted","Data":"88b49b8b4f26204062272d1e1dffc2b4975693f3ba9d802f67ea90ef4e8bdf31"} Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.475951 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9293a09-0853-43ee-bf42-6e634b4ce617","Type":"ContainerStarted","Data":"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c"} Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.476227 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9293a09-0853-43ee-bf42-6e634b4ce617","Type":"ContainerStarted","Data":"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54"} Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.492314 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-c2txl" podStartSLOduration=2.492283157 podStartE2EDuration="2.492283157s" podCreationTimestamp="2025-09-30 00:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:43.488393891 +0000 UTC m=+1414.524643299" watchObservedRunningTime="2025-09-30 00:32:43.492283157 +0000 UTC m=+1414.528532565" Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.511919 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.511902653 podStartE2EDuration="2.511902653s" podCreationTimestamp="2025-09-30 00:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:43.506178717 +0000 UTC m=+1414.542428125" watchObservedRunningTime="2025-09-30 00:32:43.511902653 +0000 UTC 
m=+1414.548152061" Sep 30 00:32:43 crc kubenswrapper[4809]: I0930 00:32:43.940804 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.011893 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fbc4d444f-gjm9z"] Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.012487 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="dnsmasq-dns" containerID="cri-o://34b1449cc7730f0567694ae66e51bb7b87b7f308c156ff37d189670b0131514d" gracePeriod=10 Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.109512 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.221:5353: connect: connection refused" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.508713 4809 generic.go:334] "Generic (PLEG): container finished" podID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerID="34b1449cc7730f0567694ae66e51bb7b87b7f308c156ff37d189670b0131514d" exitCode=0 Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.509104 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" event={"ID":"4e24c1d3-e30e-4b8b-a034-490cdb943da8","Type":"ContainerDied","Data":"34b1449cc7730f0567694ae66e51bb7b87b7f308c156ff37d189670b0131514d"} Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.513939 4809 generic.go:334] "Generic (PLEG): container finished" podID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerID="c88b5d5563316f34217a05b0d46b19d872278ce7620ba1b25ec3c2ccc9d12c09" exitCode=0 Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.514029 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerDied","Data":"c88b5d5563316f34217a05b0d46b19d872278ce7620ba1b25ec3c2ccc9d12c09"} Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.789674 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.796480 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.930559 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-combined-ca-bundle\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.930866 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-run-httpd\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.930979 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-swift-storage-0\") pod \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931092 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-sb\") pod \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931179 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-log-httpd\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931258 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-config-data\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931372 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-sg-core-conf-yaml\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931453 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqpg9\" (UniqueName: \"kubernetes.io/projected/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-kube-api-access-cqpg9\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931512 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-ceilometer-tls-certs\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931605 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-nb\") pod 
\"4e24c1d3-e30e-4b8b-a034-490cdb943da8\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931756 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-scripts\") pod \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\" (UID: \"a5c35188-2ce4-4a47-9e12-486aaffbe6b0\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.931914 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-config\") pod \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.932018 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnggx\" (UniqueName: \"kubernetes.io/projected/4e24c1d3-e30e-4b8b-a034-490cdb943da8-kube-api-access-rnggx\") pod \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.932150 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-svc\") pod \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\" (UID: \"4e24c1d3-e30e-4b8b-a034-490cdb943da8\") " Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.932477 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.933022 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.933056 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.941445 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-kube-api-access-cqpg9" (OuterVolumeSpecName: "kube-api-access-cqpg9") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "kube-api-access-cqpg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.945252 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-scripts" (OuterVolumeSpecName: "scripts") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.950624 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e24c1d3-e30e-4b8b-a034-490cdb943da8-kube-api-access-rnggx" (OuterVolumeSpecName: "kube-api-access-rnggx") pod "4e24c1d3-e30e-4b8b-a034-490cdb943da8" (UID: "4e24c1d3-e30e-4b8b-a034-490cdb943da8"). InnerVolumeSpecName "kube-api-access-rnggx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:44 crc kubenswrapper[4809]: I0930 00:32:44.996819 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.023740 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4e24c1d3-e30e-4b8b-a034-490cdb943da8" (UID: "4e24c1d3-e30e-4b8b-a034-490cdb943da8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.035650 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.035714 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnggx\" (UniqueName: \"kubernetes.io/projected/4e24c1d3-e30e-4b8b-a034-490cdb943da8-kube-api-access-rnggx\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.035730 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.035743 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.035774 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.035786 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqpg9\" (UniqueName: \"kubernetes.io/projected/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-kube-api-access-cqpg9\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.038132 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.056399 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4e24c1d3-e30e-4b8b-a034-490cdb943da8" (UID: "4e24c1d3-e30e-4b8b-a034-490cdb943da8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.060866 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.062700 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4e24c1d3-e30e-4b8b-a034-490cdb943da8" (UID: "4e24c1d3-e30e-4b8b-a034-490cdb943da8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.079927 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-config" (OuterVolumeSpecName: "config") pod "4e24c1d3-e30e-4b8b-a034-490cdb943da8" (UID: "4e24c1d3-e30e-4b8b-a034-490cdb943da8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.101543 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4e24c1d3-e30e-4b8b-a034-490cdb943da8" (UID: "4e24c1d3-e30e-4b8b-a034-490cdb943da8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.118834 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-config-data" (OuterVolumeSpecName: "config-data") pod "a5c35188-2ce4-4a47-9e12-486aaffbe6b0" (UID: "a5c35188-2ce4-4a47-9e12-486aaffbe6b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137516 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137547 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137559 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137568 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137576 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137584 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c35188-2ce4-4a47-9e12-486aaffbe6b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.137593 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4e24c1d3-e30e-4b8b-a034-490cdb943da8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.550966 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.553509 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fbc4d444f-gjm9z" event={"ID":"4e24c1d3-e30e-4b8b-a034-490cdb943da8","Type":"ContainerDied","Data":"21b0e391d10492ac591a57b15f8b6148899a79f0bb7b107aa3163bbf5995e866"} Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.553581 4809 scope.go:117] "RemoveContainer" containerID="34b1449cc7730f0567694ae66e51bb7b87b7f308c156ff37d189670b0131514d" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.560315 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5c35188-2ce4-4a47-9e12-486aaffbe6b0","Type":"ContainerDied","Data":"232e0d6b53d1e60977719a252f7d60ecc104e09f747bd4cce8e94ad6b07039ee"} Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.560389 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.578709 4809 scope.go:117] "RemoveContainer" containerID="ba2a65bfe9dc75d84411d6997d8d45384b0913ded84b953c87ef7ec18c072b7a" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.613127 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.627928 4809 scope.go:117] "RemoveContainer" containerID="5668aa20357847587b845fc41f059cc70133a28d31f8bb21915a65b5c7213dc2" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.632301 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.652742 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fbc4d444f-gjm9z"] Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.662806 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fbc4d444f-gjm9z"] Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.670660 4809 scope.go:117] "RemoveContainer" containerID="9e12416376c669d0d0659aee2fbfe3c94043afbfc8ac0118e4968b8fc76a607d" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.675692 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:45 crc kubenswrapper[4809]: E0930 00:32:45.676324 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-notification-agent" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676347 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-notification-agent" Sep 30 00:32:45 crc kubenswrapper[4809]: E0930 00:32:45.676388 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="proxy-httpd" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676402 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="proxy-httpd" Sep 30 00:32:45 crc kubenswrapper[4809]: E0930 00:32:45.676421 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-central-agent" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676429 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-central-agent" Sep 30 00:32:45 crc kubenswrapper[4809]: E0930 00:32:45.676439 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="sg-core" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676446 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="sg-core" Sep 30 00:32:45 crc kubenswrapper[4809]: E0930 00:32:45.676466 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="init" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676475 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="init" Sep 30 00:32:45 crc kubenswrapper[4809]: E0930 00:32:45.676497 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="dnsmasq-dns" Sep 30 
00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676505 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="dnsmasq-dns" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676801 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-notification-agent" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676821 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" containerName="dnsmasq-dns" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676841 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="ceilometer-central-agent" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676857 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="proxy-httpd" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.676889 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" containerName="sg-core" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.694521 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.700822 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.701097 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.701200 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.737073 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e24c1d3-e30e-4b8b-a034-490cdb943da8" path="/var/lib/kubelet/pods/4e24c1d3-e30e-4b8b-a034-490cdb943da8/volumes" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.737752 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5c35188-2ce4-4a47-9e12-486aaffbe6b0" path="/var/lib/kubelet/pods/a5c35188-2ce4-4a47-9e12-486aaffbe6b0/volumes" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.738577 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.771183 4809 scope.go:117] "RemoveContainer" containerID="b17c080957c0bcc39ee7d26f62bda9d4cdfed24c40e76e01d5d6eeff48e12ea9" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.846665 4809 scope.go:117] "RemoveContainer" containerID="c88b5d5563316f34217a05b0d46b19d872278ce7620ba1b25ec3c2ccc9d12c09" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862204 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862298 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-scripts\") pod \"ceilometer-0\" (UID: 
\"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862343 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-config-data\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862380 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-log-httpd\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862414 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jwgv\" (UniqueName: \"kubernetes.io/projected/6afe7005-5d90-43f2-9d21-e3202ab493dd-kube-api-access-7jwgv\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862433 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-run-httpd\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862490 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.862561 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964312 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-config-data\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964367 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-log-httpd\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964395 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jwgv\" (UniqueName: \"kubernetes.io/projected/6afe7005-5d90-43f2-9d21-e3202ab493dd-kube-api-access-7jwgv\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964413 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-run-httpd\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964455 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964509 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964556 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.964602 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-scripts\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.965241 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-log-httpd\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.965350 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-run-httpd\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.968344 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.969307 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-config-data\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.970857 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.971289 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.980052 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-scripts\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:45 crc kubenswrapper[4809]: I0930 00:32:45.985240 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jwgv\" (UniqueName: \"kubernetes.io/projected/6afe7005-5d90-43f2-9d21-e3202ab493dd-kube-api-access-7jwgv\") pod \"ceilometer-0\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " pod="openstack/ceilometer-0" Sep 30 00:32:46 crc kubenswrapper[4809]: I0930 00:32:46.039135 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:32:46 crc kubenswrapper[4809]: I0930 00:32:46.500818 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:32:46 crc kubenswrapper[4809]: I0930 00:32:46.574084 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerStarted","Data":"1fb6b795525a718cece848c3a1caf802b9d24694ec56326089523461455d62d4"} Sep 30 00:32:47 crc kubenswrapper[4809]: I0930 00:32:47.586908 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerStarted","Data":"ef8e96a2afaab6d0ce644b7dda6bac6a02cf54138f86cbfc72633ba07876ee4c"} Sep 30 00:32:48 crc kubenswrapper[4809]: I0930 00:32:48.602722 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerStarted","Data":"e5878f5e48946bdd6b749df99dbceb163e1dc4c7dceae7e04efa4d9b99ca1184"} Sep 30 00:32:48 crc kubenswrapper[4809]: I0930 00:32:48.605610 4809 generic.go:334] "Generic (PLEG): container finished" podID="009b9521-367b-4991-910e-b3ede9622095" containerID="bf4b137ba6dc126192b8984b62420580aaadd43805dc3cd4f86aba951d598406" exitCode=0 Sep 30 00:32:48 crc kubenswrapper[4809]: I0930 00:32:48.605676 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-c2txl" event={"ID":"009b9521-367b-4991-910e-b3ede9622095","Type":"ContainerDied","Data":"bf4b137ba6dc126192b8984b62420580aaadd43805dc3cd4f86aba951d598406"} Sep 30 00:32:49 crc kubenswrapper[4809]: I0930 00:32:49.618682 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerStarted","Data":"18861d97d4e22518cf163cb651fd58878379a81cdc1e33b642352dd7317a94ed"} Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.134766 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.256409 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-scripts\") pod \"009b9521-367b-4991-910e-b3ede9622095\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.256707 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-config-data\") pod \"009b9521-367b-4991-910e-b3ede9622095\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.256755 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-combined-ca-bundle\") pod \"009b9521-367b-4991-910e-b3ede9622095\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.256801 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkxr7\" (UniqueName: \"kubernetes.io/projected/009b9521-367b-4991-910e-b3ede9622095-kube-api-access-xkxr7\") pod \"009b9521-367b-4991-910e-b3ede9622095\" (UID: \"009b9521-367b-4991-910e-b3ede9622095\") " Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.267742 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rgnkd"] Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.274854 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/009b9521-367b-4991-910e-b3ede9622095-kube-api-access-xkxr7" (OuterVolumeSpecName: "kube-api-access-xkxr7") pod "009b9521-367b-4991-910e-b3ede9622095" (UID: "009b9521-367b-4991-910e-b3ede9622095"). InnerVolumeSpecName "kube-api-access-xkxr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:50 crc kubenswrapper[4809]: E0930 00:32:50.285158 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="009b9521-367b-4991-910e-b3ede9622095" containerName="nova-manage" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.285196 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="009b9521-367b-4991-910e-b3ede9622095" containerName="nova-manage" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.285476 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="009b9521-367b-4991-910e-b3ede9622095" containerName="nova-manage" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.287447 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-scripts" (OuterVolumeSpecName: "scripts") pod "009b9521-367b-4991-910e-b3ede9622095" (UID: "009b9521-367b-4991-910e-b3ede9622095"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.287474 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rgnkd"] Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.287559 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.313118 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-config-data" (OuterVolumeSpecName: "config-data") pod "009b9521-367b-4991-910e-b3ede9622095" (UID: "009b9521-367b-4991-910e-b3ede9622095"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.336689 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "009b9521-367b-4991-910e-b3ede9622095" (UID: "009b9521-367b-4991-910e-b3ede9622095"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360036 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-catalog-content\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360218 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mswxm\" (UniqueName: \"kubernetes.io/projected/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-kube-api-access-mswxm\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360367 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-utilities\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360549 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360687 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360731 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009b9521-367b-4991-910e-b3ede9622095-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.360747 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkxr7\" (UniqueName: \"kubernetes.io/projected/009b9521-367b-4991-910e-b3ede9622095-kube-api-access-xkxr7\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.462476 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-catalog-content\") pod 
\"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.462575 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mswxm\" (UniqueName: \"kubernetes.io/projected/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-kube-api-access-mswxm\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.462598 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-utilities\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.462971 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-catalog-content\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.463003 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-utilities\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.482919 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mswxm\" (UniqueName: \"kubernetes.io/projected/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-kube-api-access-mswxm\") pod \"community-operators-rgnkd\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.689144 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-c2txl" event={"ID":"009b9521-367b-4991-910e-b3ede9622095","Type":"ContainerDied","Data":"88b49b8b4f26204062272d1e1dffc2b4975693f3ba9d802f67ea90ef4e8bdf31"} Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.689182 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88b49b8b4f26204062272d1e1dffc2b4975693f3ba9d802f67ea90ef4e8bdf31" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.689247 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-c2txl" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.745215 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.893492 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.893777 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-log" containerID="cri-o://d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54" gracePeriod=30 Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.894251 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-api" containerID="cri-o://a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c" gracePeriod=30 Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.902938 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.903173 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" containerName="nova-scheduler-scheduler" containerID="cri-o://3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905" gracePeriod=30 Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.948939 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.949217 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-log" containerID="cri-o://2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da" gracePeriod=30 Sep 30 00:32:50 crc kubenswrapper[4809]: I0930 00:32:50.949367 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-metadata" containerID="cri-o://09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a" gracePeriod=30 Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.388083 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rgnkd"] Sep 30 00:32:51 crc kubenswrapper[4809]: W0930 00:32:51.392947 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c3abaf3_fb17_4a35_9dbd_eeef2c30657a.slice/crio-0e7cbf3c0d25dea69e176ce69200dedb1b8748bf4c1d33ceaa62a5f5052ae064 WatchSource:0}: Error finding container 0e7cbf3c0d25dea69e176ce69200dedb1b8748bf4c1d33ceaa62a5f5052ae064: Status 404 returned error can't find the container with id 0e7cbf3c0d25dea69e176ce69200dedb1b8748bf4c1d33ceaa62a5f5052ae064 Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.615046 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.701927 4809 generic.go:334] "Generic (PLEG): container finished" podID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerID="a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c" exitCode=0 Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.701960 4809 generic.go:334] "Generic (PLEG): container finished" podID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerID="d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54" exitCode=143 Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.702059 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.705201 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-combined-ca-bundle\") pod \"e9293a09-0853-43ee-bf42-6e634b4ce617\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.705295 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-internal-tls-certs\") pod \"e9293a09-0853-43ee-bf42-6e634b4ce617\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.705331 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8twk4\" (UniqueName: \"kubernetes.io/projected/e9293a09-0853-43ee-bf42-6e634b4ce617-kube-api-access-8twk4\") pod \"e9293a09-0853-43ee-bf42-6e634b4ce617\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.705353 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-config-data\") pod \"e9293a09-0853-43ee-bf42-6e634b4ce617\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.705414 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-public-tls-certs\") pod \"e9293a09-0853-43ee-bf42-6e634b4ce617\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.705559 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9293a09-0853-43ee-bf42-6e634b4ce617-logs\") pod \"e9293a09-0853-43ee-bf42-6e634b4ce617\" (UID: \"e9293a09-0853-43ee-bf42-6e634b4ce617\") " Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.707098 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerID="ae20599982105d8cfe303eca7195a3aa9cccd6c8d9fcd4fff628924600d2caca" exitCode=0 Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.712731 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9293a09-0853-43ee-bf42-6e634b4ce617-kube-api-access-8twk4" (OuterVolumeSpecName: "kube-api-access-8twk4") pod "e9293a09-0853-43ee-bf42-6e634b4ce617" (UID: "e9293a09-0853-43ee-bf42-6e634b4ce617"). InnerVolumeSpecName "kube-api-access-8twk4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.718877 4809 generic.go:334] "Generic (PLEG): container finished" podID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerID="2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da" exitCode=143 Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.718936 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9293a09-0853-43ee-bf42-6e634b4ce617","Type":"ContainerDied","Data":"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.718988 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9293a09-0853-43ee-bf42-6e634b4ce617","Type":"ContainerDied","Data":"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.719011 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9293a09-0853-43ee-bf42-6e634b4ce617","Type":"ContainerDied","Data":"14906ce02fd08931400870f7f5ee96a03da9f8103c0c42ca0a3b30e0c0b2e226"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.719022 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerDied","Data":"ae20599982105d8cfe303eca7195a3aa9cccd6c8d9fcd4fff628924600d2caca"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.719035 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerStarted","Data":"0e7cbf3c0d25dea69e176ce69200dedb1b8748bf4c1d33ceaa62a5f5052ae064"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.719046 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9","Type":"ContainerDied","Data":"2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.719066 4809 scope.go:117] "RemoveContainer" containerID="a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.732683 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9293a09-0853-43ee-bf42-6e634b4ce617-logs" (OuterVolumeSpecName: "logs") pod "e9293a09-0853-43ee-bf42-6e634b4ce617" (UID: "e9293a09-0853-43ee-bf42-6e634b4ce617"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.742712 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerStarted","Data":"b139ac950e4b535e14fc9622b049dd0b9b87d2acdf42c4ae2271588a128a33f3"} Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.751305 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.774945 4809 scope.go:117] "RemoveContainer" containerID="d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.782123 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9293a09-0853-43ee-bf42-6e634b4ce617" (UID: "e9293a09-0853-43ee-bf42-6e634b4ce617"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.802840 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-config-data" (OuterVolumeSpecName: "config-data") pod "e9293a09-0853-43ee-bf42-6e634b4ce617" (UID: "e9293a09-0853-43ee-bf42-6e634b4ce617"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.808957 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9293a09-0853-43ee-bf42-6e634b4ce617-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.809012 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.809023 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8twk4\" (UniqueName: \"kubernetes.io/projected/e9293a09-0853-43ee-bf42-6e634b4ce617-kube-api-access-8twk4\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.809031 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.814974 4809 scope.go:117] "RemoveContainer" containerID="a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c" Sep 30 00:32:51 crc kubenswrapper[4809]: E0930 00:32:51.815347 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c\": container with ID starting with a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c not found: ID does not exist" containerID="a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.815389 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c"} err="failed to get container status 
\"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c\": rpc error: code = NotFound desc = could not find container \"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c\": container with ID starting with a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c not found: ID does not exist" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.815410 4809 scope.go:117] "RemoveContainer" containerID="d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54" Sep 30 00:32:51 crc kubenswrapper[4809]: E0930 00:32:51.815698 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54\": container with ID starting with d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54 not found: ID does not exist" containerID="d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.815717 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54"} err="failed to get container status \"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54\": rpc error: code = NotFound desc = could not find container \"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54\": container with ID starting with d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54 not found: ID does not exist" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.815732 4809 scope.go:117] "RemoveContainer" containerID="a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.816174 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c"} err="failed to get container status \"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c\": rpc error: code = NotFound desc = could not find container \"a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c\": container with ID starting with a66e14a88c6603916e2ecbd6ec45b34d6a08e99eea7a88a408515dbc9fe05c1c not found: ID does not exist" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.816317 4809 scope.go:117] "RemoveContainer" containerID="d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.816581 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54"} err="failed to get container status \"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54\": rpc error: code = NotFound desc = could not find container \"d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54\": container with ID starting with d1c962b5992318ae0cdc5e21ea6f0b18c12adb24e75b2ecc903f587740862d54 not found: ID does not exist" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.821143 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5194967889999997 podStartE2EDuration="6.821118771s" podCreationTimestamp="2025-09-30 00:32:45 +0000 UTC" firstStartedPulling="2025-09-30 00:32:46.513530469 +0000 UTC m=+1417.549779877" lastFinishedPulling="2025-09-30 00:32:50.815152461 +0000 UTC 
m=+1421.851401859" observedRunningTime="2025-09-30 00:32:51.800432155 +0000 UTC m=+1422.836681563" watchObservedRunningTime="2025-09-30 00:32:51.821118771 +0000 UTC m=+1422.857368179" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.822825 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e9293a09-0853-43ee-bf42-6e634b4ce617" (UID: "e9293a09-0853-43ee-bf42-6e634b4ce617"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.833803 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e9293a09-0853-43ee-bf42-6e634b4ce617" (UID: "e9293a09-0853-43ee-bf42-6e634b4ce617"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.911577 4809 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:51 crc kubenswrapper[4809]: I0930 00:32:51.911612 4809 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9293a09-0853-43ee-bf42-6e634b4ce617-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.032340 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.041756 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.060967 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: E0930 00:32:52.061375 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-log" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.061388 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-log" Sep 30 00:32:52 crc kubenswrapper[4809]: E0930 00:32:52.061426 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-api" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.061431 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-api" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.061635 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-api" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.061667 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" containerName="nova-api-log" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.062707 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.065465 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.069175 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.069425 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.094109 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.117285 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-public-tls-certs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.117387 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.117497 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.117582 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-config-data\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.117620 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7388abe1-ffab-4b37-8d32-4677f7ba0412-logs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.117662 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrx2x\" (UniqueName: \"kubernetes.io/projected/7388abe1-ffab-4b37-8d32-4677f7ba0412-kube-api-access-nrx2x\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.219379 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.219844 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-config-data\") pod \"nova-api-0\" (UID: 
\"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.219892 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7388abe1-ffab-4b37-8d32-4677f7ba0412-logs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.219923 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrx2x\" (UniqueName: \"kubernetes.io/projected/7388abe1-ffab-4b37-8d32-4677f7ba0412-kube-api-access-nrx2x\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.219980 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-public-tls-certs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.220039 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.220430 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7388abe1-ffab-4b37-8d32-4677f7ba0412-logs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.232185 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.232485 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.248321 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-public-tls-certs\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.251779 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7388abe1-ffab-4b37-8d32-4677f7ba0412-config-data\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.260732 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrx2x\" (UniqueName: \"kubernetes.io/projected/7388abe1-ffab-4b37-8d32-4677f7ba0412-kube-api-access-nrx2x\") pod \"nova-api-0\" (UID: \"7388abe1-ffab-4b37-8d32-4677f7ba0412\") " pod="openstack/nova-api-0" Sep 
30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.508986 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.543386 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.637312 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-combined-ca-bundle\") pod \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.637633 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x6g9\" (UniqueName: \"kubernetes.io/projected/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-kube-api-access-2x6g9\") pod \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.637941 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-config-data\") pod \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\" (UID: \"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3\") " Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.642715 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-kube-api-access-2x6g9" (OuterVolumeSpecName: "kube-api-access-2x6g9") pod "fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" (UID: "fa11667b-2b05-4bfb-a508-c7e8d3b71bf3"). InnerVolumeSpecName "kube-api-access-2x6g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.678834 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" (UID: "fa11667b-2b05-4bfb-a508-c7e8d3b71bf3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.682781 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-config-data" (OuterVolumeSpecName: "config-data") pod "fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" (UID: "fa11667b-2b05-4bfb-a508-c7e8d3b71bf3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.745969 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.746302 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x6g9\" (UniqueName: \"kubernetes.io/projected/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-kube-api-access-2x6g9\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.746321 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.759439 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerStarted","Data":"dcbccc737f88305cdf0ae39783f1bd8027b34897b90d4e1c42687a75d578a31a"} Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.762406 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.762492 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3","Type":"ContainerDied","Data":"3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905"} Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.762551 4809 scope.go:117] "RemoveContainer" containerID="3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.762601 4809 generic.go:334] "Generic (PLEG): container finished" podID="fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" containerID="3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905" exitCode=0 Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.762671 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fa11667b-2b05-4bfb-a508-c7e8d3b71bf3","Type":"ContainerDied","Data":"650a827ee309a1cf392f4a5c31235a4cfa5a7f568e0d4663aebf17d7aedbe182"} Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.794680 4809 scope.go:117] "RemoveContainer" containerID="3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905" Sep 30 00:32:52 crc kubenswrapper[4809]: E0930 00:32:52.795445 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905\": container with ID starting with 3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905 not found: ID does not exist" containerID="3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.795478 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905"} err="failed to get container status \"3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905\": rpc error: code = NotFound desc = could not find container \"3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905\": container with ID starting with 
3cfba605cda60c9954d08c679ee8e9232748d778897eed966e4c0ad2ddff3905 not found: ID does not exist" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.815802 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.833878 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.846232 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: E0930 00:32:52.846705 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" containerName="nova-scheduler-scheduler" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.846726 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" containerName="nova-scheduler-scheduler" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.846982 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" containerName="nova-scheduler-scheduler" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.848309 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.851423 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.874048 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.953603 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw9l9\" (UniqueName: \"kubernetes.io/projected/d6904529-09ba-412f-a78d-0afff0e91091-kube-api-access-gw9l9\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.953672 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6904529-09ba-412f-a78d-0afff0e91091-config-data\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:52 crc kubenswrapper[4809]: I0930 00:32:52.953707 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6904529-09ba-412f-a78d-0afff0e91091-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:52 crc kubenswrapper[4809]: W0930 00:32:52.999375 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7388abe1_ffab_4b37_8d32_4677f7ba0412.slice/crio-4dba5982ab26b6dfe3f5507c407013e5324df39dcc2ae273d63f349dd0feb347 WatchSource:0}: Error finding container 4dba5982ab26b6dfe3f5507c407013e5324df39dcc2ae273d63f349dd0feb347: Status 404 returned error can't find the container with id 4dba5982ab26b6dfe3f5507c407013e5324df39dcc2ae273d63f349dd0feb347 Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.005161 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:32:53 crc 
kubenswrapper[4809]: I0930 00:32:53.055224 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw9l9\" (UniqueName: \"kubernetes.io/projected/d6904529-09ba-412f-a78d-0afff0e91091-kube-api-access-gw9l9\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.055545 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6904529-09ba-412f-a78d-0afff0e91091-config-data\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.055571 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6904529-09ba-412f-a78d-0afff0e91091-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.060426 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6904529-09ba-412f-a78d-0afff0e91091-config-data\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.060459 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6904529-09ba-412f-a78d-0afff0e91091-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.073609 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw9l9\" (UniqueName: \"kubernetes.io/projected/d6904529-09ba-412f-a78d-0afff0e91091-kube-api-access-gw9l9\") pod \"nova-scheduler-0\" (UID: \"d6904529-09ba-412f-a78d-0afff0e91091\") " pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.174439 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.713063 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9293a09-0853-43ee-bf42-6e634b4ce617" path="/var/lib/kubelet/pods/e9293a09-0853-43ee-bf42-6e634b4ce617/volumes" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.714070 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa11667b-2b05-4bfb-a508-c7e8d3b71bf3" path="/var/lib/kubelet/pods/fa11667b-2b05-4bfb-a508-c7e8d3b71bf3/volumes" Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.728989 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.821893 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d6904529-09ba-412f-a78d-0afff0e91091","Type":"ContainerStarted","Data":"373c6d310a4805ffe1f7e5dd31f6eea653b8eb706d1a76886ae3185f3aa383db"} Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.834032 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7388abe1-ffab-4b37-8d32-4677f7ba0412","Type":"ContainerStarted","Data":"995c6f3f765a870120716c01428680d6ceadd924112253365c412d2553b0819b"} Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.834077 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7388abe1-ffab-4b37-8d32-4677f7ba0412","Type":"ContainerStarted","Data":"68c3527ea5275c5e001cf56db84906a9e81a025372421e6b9585b5127be6a0e9"} Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.834086 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7388abe1-ffab-4b37-8d32-4677f7ba0412","Type":"ContainerStarted","Data":"4dba5982ab26b6dfe3f5507c407013e5324df39dcc2ae273d63f349dd0feb347"} Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.836866 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerID="dcbccc737f88305cdf0ae39783f1bd8027b34897b90d4e1c42687a75d578a31a" exitCode=0 Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.837500 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerDied","Data":"dcbccc737f88305cdf0ae39783f1bd8027b34897b90d4e1c42687a75d578a31a"} Sep 30 00:32:53 crc kubenswrapper[4809]: I0930 00:32:53.871058 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.871038969 podStartE2EDuration="1.871038969s" podCreationTimestamp="2025-09-30 00:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:53.867584095 +0000 UTC m=+1424.903833503" watchObservedRunningTime="2025-09-30 00:32:53.871038969 +0000 UTC m=+1424.907288367" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.114747 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.226:8775/\": read tcp 10.217.0.2:42004->10.217.0.226:8775: read: connection reset by peer" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.114918 4809 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/nova-metadata-0" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.226:8775/\": read tcp 10.217.0.2:42016->10.217.0.226:8775: read: connection reset by peer" Sep 30 00:32:54 crc kubenswrapper[4809]: E0930 00:32:54.259965 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6008f87_2cb6_4c50_b9cf_b5953ba8e9b9.slice/crio-09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a.scope\": RecentStats: unable to find data in memory cache]" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.664513 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.697068 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-combined-ca-bundle\") pod \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.697127 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmbqd\" (UniqueName: \"kubernetes.io/projected/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-kube-api-access-gmbqd\") pod \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.697356 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-nova-metadata-tls-certs\") pod \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.697399 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-logs\") pod \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.697523 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-config-data\") pod \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\" (UID: \"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9\") " Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.700422 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-logs" (OuterVolumeSpecName: "logs") pod "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" (UID: "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.715098 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-kube-api-access-gmbqd" (OuterVolumeSpecName: "kube-api-access-gmbqd") pod "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" (UID: "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9"). InnerVolumeSpecName "kube-api-access-gmbqd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.742337 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" (UID: "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.776469 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-config-data" (OuterVolumeSpecName: "config-data") pod "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" (UID: "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.800126 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.800162 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmbqd\" (UniqueName: \"kubernetes.io/projected/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-kube-api-access-gmbqd\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.800174 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.800183 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.813382 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" (UID: "c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.850365 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d6904529-09ba-412f-a78d-0afff0e91091","Type":"ContainerStarted","Data":"33854ea62cc05846ab98562322caa0850ec4ca112cf0e2c39a0dc5241ba45e54"} Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.854772 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerStarted","Data":"e23b2903c81e7379fc434415b27baf1ee2cf049a645a0dd5770776ef817dc25d"} Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.858512 4809 generic.go:334] "Generic (PLEG): container finished" podID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerID="09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a" exitCode=0 Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.859151 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.861486 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9","Type":"ContainerDied","Data":"09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a"} Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.861537 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9","Type":"ContainerDied","Data":"4471cd95b00fd278801daa9d008d347325d2d01e1fa790af34743bcb0ee8d678"} Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.861555 4809 scope.go:117] "RemoveContainer" containerID="09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.888171 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.888149433 podStartE2EDuration="2.888149433s" podCreationTimestamp="2025-09-30 00:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:54.869201896 +0000 UTC m=+1425.905451304" watchObservedRunningTime="2025-09-30 00:32:54.888149433 +0000 UTC m=+1425.924398841" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.892622 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rgnkd" podStartSLOduration=2.328177312 podStartE2EDuration="4.892604506s" podCreationTimestamp="2025-09-30 00:32:50 +0000 UTC" firstStartedPulling="2025-09-30 00:32:51.730537825 +0000 UTC m=+1422.766787233" lastFinishedPulling="2025-09-30 00:32:54.294965019 +0000 UTC m=+1425.331214427" observedRunningTime="2025-09-30 00:32:54.892171124 +0000 UTC m=+1425.928420552" watchObservedRunningTime="2025-09-30 00:32:54.892604506 +0000 UTC m=+1425.928853914" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.895210 4809 scope.go:117] "RemoveContainer" containerID="2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.902343 4809 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.913384 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.922273 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.938460 4809 scope.go:117] "RemoveContainer" containerID="09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a" Sep 30 00:32:54 crc kubenswrapper[4809]: E0930 00:32:54.940767 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a\": container with ID starting with 09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a not found: ID does not exist" containerID="09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.940803 4809 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a"} err="failed to get container status \"09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a\": rpc error: code = NotFound desc = could not find container \"09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a\": container with ID starting with 09f7c1510122742ddfb198741be83dc137d537e714e0eecaee7d379cc430dd1a not found: ID does not exist" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.940824 4809 scope.go:117] "RemoveContainer" containerID="2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da" Sep 30 00:32:54 crc kubenswrapper[4809]: E0930 00:32:54.952556 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da\": container with ID starting with 2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da not found: ID does not exist" containerID="2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.952613 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da"} err="failed to get container status \"2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da\": rpc error: code = NotFound desc = could not find container \"2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da\": container with ID starting with 2d127b7e0dabcbe377ef4d1296807b041ae66c691d1b72db69ff6e1ea95442da not found: ID does not exist" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.958421 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:54 crc kubenswrapper[4809]: E0930 00:32:54.959056 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-log" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.959081 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-log" Sep 30 00:32:54 crc kubenswrapper[4809]: E0930 00:32:54.959096 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-metadata" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.959104 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-metadata" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.959372 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-log" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.959407 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" containerName="nova-metadata-metadata" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.960924 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.968097 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.968360 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:32:54 crc kubenswrapper[4809]: I0930 00:32:54.980166 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.003877 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.003950 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.003971 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz8wb\" (UniqueName: \"kubernetes.io/projected/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-kube-api-access-nz8wb\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.003997 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-config-data\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.004019 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-logs\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.105952 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.106047 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.106082 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz8wb\" (UniqueName: \"kubernetes.io/projected/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-kube-api-access-nz8wb\") pod \"nova-metadata-0\" (UID: 
\"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.106118 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-config-data\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.106147 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-logs\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.106874 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-logs\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.113249 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.114216 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.121376 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-config-data\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.123176 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz8wb\" (UniqueName: \"kubernetes.io/projected/ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb-kube-api-access-nz8wb\") pod \"nova-metadata-0\" (UID: \"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb\") " pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.295870 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.704478 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9" path="/var/lib/kubelet/pods/c6008f87-2cb6-4c50-b9cf-b5953ba8e9b9/volumes" Sep 30 00:32:55 crc kubenswrapper[4809]: W0930 00:32:55.772532 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded84a4ca_87f9_4ef2_aedb_4feb49d6a0fb.slice/crio-d936bf8255bf3bed38aca480c6496f7f0a5cf1ca0d0a9352b4376b84942cb9a0 WatchSource:0}: Error finding container d936bf8255bf3bed38aca480c6496f7f0a5cf1ca0d0a9352b4376b84942cb9a0: Status 404 returned error can't find the container with id d936bf8255bf3bed38aca480c6496f7f0a5cf1ca0d0a9352b4376b84942cb9a0 Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.774670 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:32:55 crc kubenswrapper[4809]: I0930 00:32:55.872489 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb","Type":"ContainerStarted","Data":"d936bf8255bf3bed38aca480c6496f7f0a5cf1ca0d0a9352b4376b84942cb9a0"} Sep 30 00:32:56 crc kubenswrapper[4809]: I0930 00:32:56.886335 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb","Type":"ContainerStarted","Data":"a177f53327cbc9e89de40c95b6c8c6ec084ed99f69b7d4cc277ce6126c462692"} Sep 30 00:32:56 crc kubenswrapper[4809]: I0930 00:32:56.886383 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb","Type":"ContainerStarted","Data":"8cb1ae9e0ebf252b8ca880c0237ff26cf627659c678e5a0bbf42e4a78b9c9fc2"} Sep 30 00:32:56 crc kubenswrapper[4809]: I0930 00:32:56.914129 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.914099846 podStartE2EDuration="2.914099846s" podCreationTimestamp="2025-09-30 00:32:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:32:56.907119516 +0000 UTC m=+1427.943368954" watchObservedRunningTime="2025-09-30 00:32:56.914099846 +0000 UTC m=+1427.950349304" Sep 30 00:32:58 crc kubenswrapper[4809]: I0930 00:32:58.175374 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:32:58 crc kubenswrapper[4809]: I0930 00:32:58.920418 4809 generic.go:334] "Generic (PLEG): container finished" podID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerID="c61ce033cb68d15345f4bc5a8a25756339bc675d1859cc455c5f9c06877ff875" exitCode=137 Sep 30 00:32:58 crc kubenswrapper[4809]: I0930 00:32:58.920766 4809 generic.go:334] "Generic (PLEG): container finished" podID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerID="a92d489b2aa6996a01bae939e8589815662a9af03f128329371cc63dc9942d1d" exitCode=137 Sep 30 00:32:58 crc kubenswrapper[4809]: I0930 00:32:58.920560 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerDied","Data":"c61ce033cb68d15345f4bc5a8a25756339bc675d1859cc455c5f9c06877ff875"} Sep 30 00:32:58 crc kubenswrapper[4809]: I0930 00:32:58.920816 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerDied","Data":"a92d489b2aa6996a01bae939e8589815662a9af03f128329371cc63dc9942d1d"} Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.233336 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.402246 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-scripts\") pod \"f19829b9-a07b-4348-b2c0-31330ecaac0c\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.402477 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-combined-ca-bundle\") pod \"f19829b9-a07b-4348-b2c0-31330ecaac0c\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.402536 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-config-data\") pod \"f19829b9-a07b-4348-b2c0-31330ecaac0c\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.402576 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55pq9\" (UniqueName: \"kubernetes.io/projected/f19829b9-a07b-4348-b2c0-31330ecaac0c-kube-api-access-55pq9\") pod \"f19829b9-a07b-4348-b2c0-31330ecaac0c\" (UID: \"f19829b9-a07b-4348-b2c0-31330ecaac0c\") " Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.407923 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-scripts" (OuterVolumeSpecName: "scripts") pod "f19829b9-a07b-4348-b2c0-31330ecaac0c" (UID: "f19829b9-a07b-4348-b2c0-31330ecaac0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.417529 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f19829b9-a07b-4348-b2c0-31330ecaac0c-kube-api-access-55pq9" (OuterVolumeSpecName: "kube-api-access-55pq9") pod "f19829b9-a07b-4348-b2c0-31330ecaac0c" (UID: "f19829b9-a07b-4348-b2c0-31330ecaac0c"). InnerVolumeSpecName "kube-api-access-55pq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.504513 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55pq9\" (UniqueName: \"kubernetes.io/projected/f19829b9-a07b-4348-b2c0-31330ecaac0c-kube-api-access-55pq9\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.504548 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.515161 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f19829b9-a07b-4348-b2c0-31330ecaac0c" (UID: "f19829b9-a07b-4348-b2c0-31330ecaac0c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.538248 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-config-data" (OuterVolumeSpecName: "config-data") pod "f19829b9-a07b-4348-b2c0-31330ecaac0c" (UID: "f19829b9-a07b-4348-b2c0-31330ecaac0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.606859 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.607180 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19829b9-a07b-4348-b2c0-31330ecaac0c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.947319 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f19829b9-a07b-4348-b2c0-31330ecaac0c","Type":"ContainerDied","Data":"ad4c9369e9b77febfdf3e912895257dbdf1d79ccf14a8719395e95ff93fa3c11"} Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.947374 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.947381 4809 scope.go:117] "RemoveContainer" containerID="c61ce033cb68d15345f4bc5a8a25756339bc675d1859cc455c5f9c06877ff875" Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.978608 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.989106 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Sep 30 00:32:59 crc kubenswrapper[4809]: I0930 00:32:59.994370 4809 scope.go:117] "RemoveContainer" containerID="a92d489b2aa6996a01bae939e8589815662a9af03f128329371cc63dc9942d1d" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.024478 4809 scope.go:117] "RemoveContainer" containerID="6cfd04d8958a676558fa805ab65cc36d6110ad38d065ef78d343466b6179951a" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.030277 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Sep 30 00:33:00 crc kubenswrapper[4809]: E0930 00:33:00.030878 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-api" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.030899 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-api" Sep 30 00:33:00 crc kubenswrapper[4809]: E0930 00:33:00.030917 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-listener" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.030926 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-listener" Sep 30 00:33:00 crc kubenswrapper[4809]: E0930 00:33:00.030977 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-notifier" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.030985 4809 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-notifier" Sep 30 00:33:00 crc kubenswrapper[4809]: E0930 00:33:00.030994 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-evaluator" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.031001 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-evaluator" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.031242 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-evaluator" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.031275 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-listener" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.031288 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-api" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.031304 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" containerName="aodh-notifier" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.033387 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.035972 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.036028 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.036210 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-vzdp5" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.036261 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.036445 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.050852 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.076839 4809 scope.go:117] "RemoveContainer" containerID="4ff1ec423a7885991442e6326227e9c80532f8137b7e65867dbc487aa351d0c3" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.117112 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-public-tls-certs\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.117305 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-config-data\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.117446 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-internal-tls-certs\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.117589 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8dl9\" (UniqueName: \"kubernetes.io/projected/369bb779-4353-4a71-b237-da228464a8ac-kube-api-access-c8dl9\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.117766 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-combined-ca-bundle\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.117890 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-scripts\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.220069 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-combined-ca-bundle\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.220142 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-scripts\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.220210 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-public-tls-certs\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.220346 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-config-data\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.220398 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-internal-tls-certs\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.220451 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8dl9\" (UniqueName: \"kubernetes.io/projected/369bb779-4353-4a71-b237-da228464a8ac-kube-api-access-c8dl9\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.226748 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-public-tls-certs\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.226760 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-combined-ca-bundle\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.233901 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-scripts\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.235905 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-config-data\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.236569 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-internal-tls-certs\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.237972 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8dl9\" (UniqueName: \"kubernetes.io/projected/369bb779-4353-4a71-b237-da228464a8ac-kube-api-access-c8dl9\") pod \"aodh-0\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.296225 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.296307 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.373118 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.746267 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.746952 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.797268 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.881500 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:33:00 crc kubenswrapper[4809]: W0930 00:33:00.885268 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod369bb779_4353_4a71_b237_da228464a8ac.slice/crio-8da5a92395c698976ed4c1e5b8abdec583362e27712d0e0f0727e36f58e50bd4 WatchSource:0}: Error finding container 8da5a92395c698976ed4c1e5b8abdec583362e27712d0e0f0727e36f58e50bd4: Status 404 returned error can't find the container with id 8da5a92395c698976ed4c1e5b8abdec583362e27712d0e0f0727e36f58e50bd4 Sep 30 00:33:00 crc kubenswrapper[4809]: I0930 00:33:00.965032 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerStarted","Data":"8da5a92395c698976ed4c1e5b8abdec583362e27712d0e0f0727e36f58e50bd4"} Sep 30 00:33:01 crc kubenswrapper[4809]: I0930 00:33:01.013364 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:33:01 crc kubenswrapper[4809]: I0930 00:33:01.062031 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rgnkd"] Sep 30 00:33:01 crc kubenswrapper[4809]: I0930 00:33:01.706428 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f19829b9-a07b-4348-b2c0-31330ecaac0c" path="/var/lib/kubelet/pods/f19829b9-a07b-4348-b2c0-31330ecaac0c/volumes" Sep 30 00:33:01 crc kubenswrapper[4809]: I0930 00:33:01.984360 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerStarted","Data":"d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9"} Sep 30 00:33:02 crc kubenswrapper[4809]: I0930 00:33:02.509888 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:33:02 crc kubenswrapper[4809]: I0930 00:33:02.510201 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:33:02 crc kubenswrapper[4809]: I0930 00:33:02.997340 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerStarted","Data":"a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586"} Sep 30 00:33:02 crc kubenswrapper[4809]: I0930 00:33:02.997502 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rgnkd" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="registry-server" containerID="cri-o://e23b2903c81e7379fc434415b27baf1ee2cf049a645a0dd5770776ef817dc25d" gracePeriod=2 Sep 30 00:33:03 crc kubenswrapper[4809]: I0930 
00:33:03.174751 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 00:33:03 crc kubenswrapper[4809]: I0930 00:33:03.221345 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 00:33:03 crc kubenswrapper[4809]: I0930 00:33:03.520982 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7388abe1-ffab-4b37-8d32-4677f7ba0412" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.240:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 00:33:03 crc kubenswrapper[4809]: I0930 00:33:03.521465 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7388abe1-ffab-4b37-8d32-4677f7ba0412" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.240:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.015287 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerStarted","Data":"ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73"} Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.017827 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerID="e23b2903c81e7379fc434415b27baf1ee2cf049a645a0dd5770776ef817dc25d" exitCode=0 Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.017911 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerDied","Data":"e23b2903c81e7379fc434415b27baf1ee2cf049a645a0dd5770776ef817dc25d"} Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.017959 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rgnkd" event={"ID":"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a","Type":"ContainerDied","Data":"0e7cbf3c0d25dea69e176ce69200dedb1b8748bf4c1d33ceaa62a5f5052ae064"} Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.017971 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e7cbf3c0d25dea69e176ce69200dedb1b8748bf4c1d33ceaa62a5f5052ae064" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.060325 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.124427 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.213914 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-utilities\") pod \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.215287 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mswxm\" (UniqueName: \"kubernetes.io/projected/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-kube-api-access-mswxm\") pod \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.214942 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-utilities" (OuterVolumeSpecName: "utilities") pod "9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" (UID: "9c3abaf3-fb17-4a35-9dbd-eeef2c30657a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.215440 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-catalog-content\") pod \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\" (UID: \"9c3abaf3-fb17-4a35-9dbd-eeef2c30657a\") " Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.216061 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.219351 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-kube-api-access-mswxm" (OuterVolumeSpecName: "kube-api-access-mswxm") pod "9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" (UID: "9c3abaf3-fb17-4a35-9dbd-eeef2c30657a"). InnerVolumeSpecName "kube-api-access-mswxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.283172 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" (UID: "9c3abaf3-fb17-4a35-9dbd-eeef2c30657a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.318444 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:04 crc kubenswrapper[4809]: I0930 00:33:04.318475 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mswxm\" (UniqueName: \"kubernetes.io/projected/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a-kube-api-access-mswxm\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.034293 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rgnkd" Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.037016 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerStarted","Data":"40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c"} Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.071330 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.852225689 podStartE2EDuration="6.071308318s" podCreationTimestamp="2025-09-30 00:32:59 +0000 UTC" firstStartedPulling="2025-09-30 00:33:00.887374214 +0000 UTC m=+1431.923623622" lastFinishedPulling="2025-09-30 00:33:04.106456843 +0000 UTC m=+1435.142706251" observedRunningTime="2025-09-30 00:33:05.064341188 +0000 UTC m=+1436.100590616" watchObservedRunningTime="2025-09-30 00:33:05.071308318 +0000 UTC m=+1436.107557726" Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.110629 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rgnkd"] Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.125337 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rgnkd"] Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.296386 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.296435 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:33:05 crc kubenswrapper[4809]: I0930 00:33:05.706132 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" path="/var/lib/kubelet/pods/9c3abaf3-fb17-4a35-9dbd-eeef2c30657a/volumes" Sep 30 00:33:06 crc kubenswrapper[4809]: I0930 00:33:06.311887 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.242:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 00:33:06 crc kubenswrapper[4809]: I0930 00:33:06.311880 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.242:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 00:33:12 crc kubenswrapper[4809]: I0930 00:33:12.517525 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:33:12 crc kubenswrapper[4809]: I0930 00:33:12.518371 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:33:12 crc kubenswrapper[4809]: I0930 00:33:12.520266 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:33:12 crc kubenswrapper[4809]: I0930 00:33:12.526187 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.025990 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vz4tc"] Sep 30 00:33:13 crc kubenswrapper[4809]: E0930 00:33:13.026993 4809 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="extract-content" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.027020 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="extract-content" Sep 30 00:33:13 crc kubenswrapper[4809]: E0930 00:33:13.027043 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="extract-utilities" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.027052 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="extract-utilities" Sep 30 00:33:13 crc kubenswrapper[4809]: E0930 00:33:13.027088 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="registry-server" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.027102 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="registry-server" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.027462 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c3abaf3-fb17-4a35-9dbd-eeef2c30657a" containerName="registry-server" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.029585 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.055867 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vz4tc"] Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.137518 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-catalog-content\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.137585 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-utilities\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.137665 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnj77\" (UniqueName: \"kubernetes.io/projected/c23e442a-7748-4399-ae49-d374767800ef-kube-api-access-bnj77\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.140534 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.150229 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.240452 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-catalog-content\") pod \"redhat-marketplace-vz4tc\" 
(UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.240589 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-utilities\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.240696 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnj77\" (UniqueName: \"kubernetes.io/projected/c23e442a-7748-4399-ae49-d374767800ef-kube-api-access-bnj77\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.242127 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-utilities\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.242534 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-catalog-content\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.272830 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnj77\" (UniqueName: \"kubernetes.io/projected/c23e442a-7748-4399-ae49-d374767800ef-kube-api-access-bnj77\") pod \"redhat-marketplace-vz4tc\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.376101 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:13 crc kubenswrapper[4809]: I0930 00:33:13.866669 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vz4tc"] Sep 30 00:33:13 crc kubenswrapper[4809]: W0930 00:33:13.873251 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc23e442a_7748_4399_ae49_d374767800ef.slice/crio-253312251dd51956606d7183a2e012d59118098e02ab0ef9befe5338894ed0af WatchSource:0}: Error finding container 253312251dd51956606d7183a2e012d59118098e02ab0ef9befe5338894ed0af: Status 404 returned error can't find the container with id 253312251dd51956606d7183a2e012d59118098e02ab0ef9befe5338894ed0af Sep 30 00:33:14 crc kubenswrapper[4809]: I0930 00:33:14.152785 4809 generic.go:334] "Generic (PLEG): container finished" podID="c23e442a-7748-4399-ae49-d374767800ef" containerID="44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52" exitCode=0 Sep 30 00:33:14 crc kubenswrapper[4809]: I0930 00:33:14.154334 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vz4tc" event={"ID":"c23e442a-7748-4399-ae49-d374767800ef","Type":"ContainerDied","Data":"44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52"} Sep 30 00:33:14 crc kubenswrapper[4809]: I0930 00:33:14.154373 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vz4tc" event={"ID":"c23e442a-7748-4399-ae49-d374767800ef","Type":"ContainerStarted","Data":"253312251dd51956606d7183a2e012d59118098e02ab0ef9befe5338894ed0af"} Sep 30 00:33:15 crc kubenswrapper[4809]: I0930 00:33:15.168529 4809 generic.go:334] "Generic (PLEG): container finished" podID="c23e442a-7748-4399-ae49-d374767800ef" containerID="e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199" exitCode=0 Sep 30 00:33:15 crc kubenswrapper[4809]: I0930 00:33:15.168726 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vz4tc" event={"ID":"c23e442a-7748-4399-ae49-d374767800ef","Type":"ContainerDied","Data":"e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199"} Sep 30 00:33:15 crc kubenswrapper[4809]: I0930 00:33:15.303387 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:33:15 crc kubenswrapper[4809]: I0930 00:33:15.303980 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:33:15 crc kubenswrapper[4809]: I0930 00:33:15.313471 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:33:16 crc kubenswrapper[4809]: I0930 00:33:16.061186 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:33:16 crc kubenswrapper[4809]: I0930 00:33:16.187325 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:33:17 crc kubenswrapper[4809]: I0930 00:33:17.206598 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vz4tc" event={"ID":"c23e442a-7748-4399-ae49-d374767800ef","Type":"ContainerStarted","Data":"761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647"} Sep 30 00:33:23 crc kubenswrapper[4809]: I0930 00:33:23.377034 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:23 crc kubenswrapper[4809]: I0930 00:33:23.377678 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:23 crc kubenswrapper[4809]: I0930 00:33:23.427614 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:23 crc kubenswrapper[4809]: I0930 00:33:23.464990 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vz4tc" podStartSLOduration=9.586670835 podStartE2EDuration="11.464958882s" podCreationTimestamp="2025-09-30 00:33:12 +0000 UTC" firstStartedPulling="2025-09-30 00:33:14.155346597 +0000 UTC m=+1445.191596005" lastFinishedPulling="2025-09-30 00:33:16.033634644 +0000 UTC m=+1447.069884052" observedRunningTime="2025-09-30 00:33:17.232809086 +0000 UTC m=+1448.269058504" watchObservedRunningTime="2025-09-30 00:33:23.464958882 +0000 UTC m=+1454.501208320" Sep 30 00:33:24 crc kubenswrapper[4809]: I0930 00:33:24.334122 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:24 crc kubenswrapper[4809]: I0930 00:33:24.390573 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vz4tc"] Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.305936 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vz4tc" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="registry-server" containerID="cri-o://761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647" gracePeriod=2 Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.805194 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.967897 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-utilities\") pod \"c23e442a-7748-4399-ae49-d374767800ef\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.968046 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-catalog-content\") pod \"c23e442a-7748-4399-ae49-d374767800ef\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.968254 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnj77\" (UniqueName: \"kubernetes.io/projected/c23e442a-7748-4399-ae49-d374767800ef-kube-api-access-bnj77\") pod \"c23e442a-7748-4399-ae49-d374767800ef\" (UID: \"c23e442a-7748-4399-ae49-d374767800ef\") " Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.968909 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-utilities" (OuterVolumeSpecName: "utilities") pod "c23e442a-7748-4399-ae49-d374767800ef" (UID: "c23e442a-7748-4399-ae49-d374767800ef"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.969903 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.975823 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c23e442a-7748-4399-ae49-d374767800ef-kube-api-access-bnj77" (OuterVolumeSpecName: "kube-api-access-bnj77") pod "c23e442a-7748-4399-ae49-d374767800ef" (UID: "c23e442a-7748-4399-ae49-d374767800ef"). InnerVolumeSpecName "kube-api-access-bnj77". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:26 crc kubenswrapper[4809]: I0930 00:33:26.981122 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c23e442a-7748-4399-ae49-d374767800ef" (UID: "c23e442a-7748-4399-ae49-d374767800ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.072096 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnj77\" (UniqueName: \"kubernetes.io/projected/c23e442a-7748-4399-ae49-d374767800ef-kube-api-access-bnj77\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.072135 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c23e442a-7748-4399-ae49-d374767800ef-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.317869 4809 generic.go:334] "Generic (PLEG): container finished" podID="c23e442a-7748-4399-ae49-d374767800ef" containerID="761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647" exitCode=0 Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.317926 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vz4tc" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.317972 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vz4tc" event={"ID":"c23e442a-7748-4399-ae49-d374767800ef","Type":"ContainerDied","Data":"761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647"} Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.318321 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vz4tc" event={"ID":"c23e442a-7748-4399-ae49-d374767800ef","Type":"ContainerDied","Data":"253312251dd51956606d7183a2e012d59118098e02ab0ef9befe5338894ed0af"} Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.318350 4809 scope.go:117] "RemoveContainer" containerID="761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.356701 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vz4tc"] Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.363079 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vz4tc"] Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.422873 4809 scope.go:117] "RemoveContainer" containerID="e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.528870 4809 scope.go:117] "RemoveContainer" containerID="44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.561990 4809 scope.go:117] "RemoveContainer" containerID="761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647" Sep 30 00:33:27 crc kubenswrapper[4809]: E0930 00:33:27.562349 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647\": container with ID starting with 761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647 not found: ID does not exist" containerID="761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.562380 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647"} err="failed to get container status \"761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647\": rpc error: code = NotFound desc = could not find container \"761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647\": container with ID starting with 761a33ae5243e2f5e1bbc13c5655de28c4edbf4bf8377b8f9d8ce83a098df647 not found: ID does not exist" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.562400 4809 scope.go:117] "RemoveContainer" containerID="e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199" Sep 30 00:33:27 crc kubenswrapper[4809]: E0930 00:33:27.562564 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199\": container with ID starting with e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199 not found: ID does not exist" containerID="e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.562586 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199"} err="failed to get container status \"e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199\": rpc error: code = NotFound desc = could not find container \"e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199\": container with ID starting with e754eff451e474e9dca3c0da03c237190d636b351c0fd6a426a2db803be53199 not found: ID does not exist" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.562600 4809 scope.go:117] "RemoveContainer" containerID="44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52" Sep 30 00:33:27 crc kubenswrapper[4809]: E0930 00:33:27.562827 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52\": container with ID starting with 44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52 not found: ID does not exist" containerID="44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.562864 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52"} err="failed to get container status \"44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52\": rpc error: code = NotFound desc = could not find container \"44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52\": container with ID starting with 44e1cf6880d626a899e45c4f943bc0d86afec6123e449fe1f32381c746820b52 not found: ID does not exist" Sep 30 00:33:27 crc kubenswrapper[4809]: I0930 00:33:27.702764 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c23e442a-7748-4399-ae49-d374767800ef" path="/var/lib/kubelet/pods/c23e442a-7748-4399-ae49-d374767800ef/volumes" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.285836 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-qttpq"] Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.297431 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-qttpq"] Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.396070 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-5gbcd"] Sep 30 00:33:28 crc kubenswrapper[4809]: E0930 00:33:28.396688 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="extract-utilities" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.396708 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="extract-utilities" Sep 30 00:33:28 crc kubenswrapper[4809]: E0930 00:33:28.396729 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="extract-content" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.396738 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="extract-content" Sep 30 00:33:28 crc kubenswrapper[4809]: E0930 00:33:28.396796 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="registry-server" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.396804 4809 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="registry-server" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.397059 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c23e442a-7748-4399-ae49-d374767800ef" containerName="registry-server" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.397878 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.402999 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-config-data\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.403060 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-combined-ca-bundle\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.403145 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc5l9\" (UniqueName: \"kubernetes.io/projected/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-kube-api-access-sc5l9\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.427397 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-5gbcd"] Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.504869 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc5l9\" (UniqueName: \"kubernetes.io/projected/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-kube-api-access-sc5l9\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.505236 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-config-data\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.505359 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-combined-ca-bundle\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.509437 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-config-data\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.510004 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-combined-ca-bundle\") 
pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.526961 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc5l9\" (UniqueName: \"kubernetes.io/projected/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-kube-api-access-sc5l9\") pod \"heat-db-sync-5gbcd\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:28 crc kubenswrapper[4809]: I0930 00:33:28.726833 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-5gbcd" Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.230459 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-5gbcd"] Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.343977 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-5gbcd" event={"ID":"feb60974-e32b-4f1e-bcd2-2647c3dc05eb","Type":"ContainerStarted","Data":"0242797c6cbaca92baeaae28519c51c4214cb1b60c24ddd537dc2532080ad30f"} Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.709460 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23ff9291-ad89-46a6-9d4a-adf9e545adb2" path="/var/lib/kubelet/pods/23ff9291-ad89-46a6-9d4a-adf9e545adb2/volumes" Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.841698 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qx48m"] Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.844133 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.851939 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qx48m"] Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.970048 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd2gn\" (UniqueName: \"kubernetes.io/projected/9dda2bf4-170e-4605-a1fa-2cac07031e03-kube-api-access-nd2gn\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.970226 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-utilities\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:29 crc kubenswrapper[4809]: I0930 00:33:29.970253 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-catalog-content\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.072210 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd2gn\" (UniqueName: \"kubernetes.io/projected/9dda2bf4-170e-4605-a1fa-2cac07031e03-kube-api-access-nd2gn\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " 
pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.072313 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-utilities\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.072341 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-catalog-content\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.072849 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-catalog-content\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.072924 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-utilities\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.099627 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd2gn\" (UniqueName: \"kubernetes.io/projected/9dda2bf4-170e-4605-a1fa-2cac07031e03-kube-api-access-nd2gn\") pod \"certified-operators-qx48m\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.174980 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.811521 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.812238 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-central-agent" containerID="cri-o://ef8e96a2afaab6d0ce644b7dda6bac6a02cf54138f86cbfc72633ba07876ee4c" gracePeriod=30 Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.812399 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="proxy-httpd" containerID="cri-o://b139ac950e4b535e14fc9622b049dd0b9b87d2acdf42c4ae2271588a128a33f3" gracePeriod=30 Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.812450 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="sg-core" containerID="cri-o://18861d97d4e22518cf163cb651fd58878379a81cdc1e33b642352dd7317a94ed" gracePeriod=30 Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.812485 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-notification-agent" containerID="cri-o://e5878f5e48946bdd6b749df99dbceb163e1dc4c7dceae7e04efa4d9b99ca1184" gracePeriod=30 Sep 30 00:33:30 crc kubenswrapper[4809]: I0930 00:33:30.848423 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qx48m"] Sep 30 00:33:30 crc kubenswrapper[4809]: W0930 00:33:30.866160 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9dda2bf4_170e_4605_a1fa_2cac07031e03.slice/crio-d596cfa21c7deb89ead4ee7044e190ac21368c9793211728e7e5f0bfe965099e WatchSource:0}: Error finding container d596cfa21c7deb89ead4ee7044e190ac21368c9793211728e7e5f0bfe965099e: Status 404 returned error can't find the container with id d596cfa21c7deb89ead4ee7044e190ac21368c9793211728e7e5f0bfe965099e Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.277892 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.389737 4809 generic.go:334] "Generic (PLEG): container finished" podID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerID="b139ac950e4b535e14fc9622b049dd0b9b87d2acdf42c4ae2271588a128a33f3" exitCode=0 Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.390027 4809 generic.go:334] "Generic (PLEG): container finished" podID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerID="18861d97d4e22518cf163cb651fd58878379a81cdc1e33b642352dd7317a94ed" exitCode=2 Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.390036 4809 generic.go:334] "Generic (PLEG): container finished" podID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerID="ef8e96a2afaab6d0ce644b7dda6bac6a02cf54138f86cbfc72633ba07876ee4c" exitCode=0 Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.389815 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerDied","Data":"b139ac950e4b535e14fc9622b049dd0b9b87d2acdf42c4ae2271588a128a33f3"} 
Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.390102 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerDied","Data":"18861d97d4e22518cf163cb651fd58878379a81cdc1e33b642352dd7317a94ed"} Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.390118 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerDied","Data":"ef8e96a2afaab6d0ce644b7dda6bac6a02cf54138f86cbfc72633ba07876ee4c"} Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.394990 4809 generic.go:334] "Generic (PLEG): container finished" podID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerID="b6e929064a09964da3df1b0423b9717f24483472e6209194896fc981b18561d6" exitCode=0 Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.395709 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerDied","Data":"b6e929064a09964da3df1b0423b9717f24483472e6209194896fc981b18561d6"} Sep 30 00:33:31 crc kubenswrapper[4809]: I0930 00:33:31.395744 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerStarted","Data":"d596cfa21c7deb89ead4ee7044e190ac21368c9793211728e7e5f0bfe965099e"} Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.112622 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.410748 4809 generic.go:334] "Generic (PLEG): container finished" podID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerID="e5878f5e48946bdd6b749df99dbceb163e1dc4c7dceae7e04efa4d9b99ca1184" exitCode=0 Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.410834 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerDied","Data":"e5878f5e48946bdd6b749df99dbceb163e1dc4c7dceae7e04efa4d9b99ca1184"} Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.417102 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerStarted","Data":"f41190c02a52f314afddc833ca0b3f0c17e12a3ff9107e29ef1ef16199706ae9"} Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.819853 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.946837 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-run-httpd\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.946951 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-combined-ca-bundle\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.947010 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-log-httpd\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.947120 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jwgv\" (UniqueName: \"kubernetes.io/projected/6afe7005-5d90-43f2-9d21-e3202ab493dd-kube-api-access-7jwgv\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.947148 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-sg-core-conf-yaml\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.947174 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-scripts\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.947209 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-ceilometer-tls-certs\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.947308 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-config-data\") pod \"6afe7005-5d90-43f2-9d21-e3202ab493dd\" (UID: \"6afe7005-5d90-43f2-9d21-e3202ab493dd\") " Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.948171 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.950052 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.953386 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-scripts" (OuterVolumeSpecName: "scripts") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.953827 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:32 crc kubenswrapper[4809]: I0930 00:33:32.975271 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6afe7005-5d90-43f2-9d21-e3202ab493dd-kube-api-access-7jwgv" (OuterVolumeSpecName: "kube-api-access-7jwgv") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "kube-api-access-7jwgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.031467 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.058257 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6afe7005-5d90-43f2-9d21-e3202ab493dd-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.058289 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jwgv\" (UniqueName: \"kubernetes.io/projected/6afe7005-5d90-43f2-9d21-e3202ab493dd-kube-api-access-7jwgv\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.058298 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.058307 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.087924 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.095866 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-config-data" (OuterVolumeSpecName: "config-data") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.125259 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6afe7005-5d90-43f2-9d21-e3202ab493dd" (UID: "6afe7005-5d90-43f2-9d21-e3202ab493dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.160604 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.160670 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.160682 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afe7005-5d90-43f2-9d21-e3202ab493dd-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.467855 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.468358 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6afe7005-5d90-43f2-9d21-e3202ab493dd","Type":"ContainerDied","Data":"1fb6b795525a718cece848c3a1caf802b9d24694ec56326089523461455d62d4"} Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.468475 4809 scope.go:117] "RemoveContainer" containerID="b139ac950e4b535e14fc9622b049dd0b9b87d2acdf42c4ae2271588a128a33f3" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.555290 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.574700 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.591423 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:33:33 crc kubenswrapper[4809]: E0930 00:33:33.591861 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="sg-core" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.591879 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="sg-core" Sep 30 00:33:33 crc kubenswrapper[4809]: E0930 00:33:33.591912 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="proxy-httpd" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.591918 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="proxy-httpd" Sep 30 00:33:33 crc kubenswrapper[4809]: E0930 00:33:33.591928 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-notification-agent" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.591934 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-notification-agent" Sep 30 00:33:33 crc kubenswrapper[4809]: E0930 00:33:33.591952 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-central-agent" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.591959 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-central-agent" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.592142 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="sg-core" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.592161 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-notification-agent" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.592181 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="ceilometer-central-agent" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.592191 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" containerName="proxy-httpd" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.592827 4809 scope.go:117] "RemoveContainer" 
containerID="18861d97d4e22518cf163cb651fd58878379a81cdc1e33b642352dd7317a94ed" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.594042 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.595548 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.597469 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.597620 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.625967 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.655635 4809 scope.go:117] "RemoveContainer" containerID="e5878f5e48946bdd6b749df99dbceb163e1dc4c7dceae7e04efa4d9b99ca1184" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.711630 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6afe7005-5d90-43f2-9d21-e3202ab493dd" path="/var/lib/kubelet/pods/6afe7005-5d90-43f2-9d21-e3202ab493dd/volumes" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.746166 4809 scope.go:117] "RemoveContainer" containerID="ef8e96a2afaab6d0ce644b7dda6bac6a02cf54138f86cbfc72633ba07876ee4c" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775582 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-run-httpd\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775617 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775738 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-scripts\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775756 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775807 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775832 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-rrn4l\" (UniqueName: \"kubernetes.io/projected/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-kube-api-access-rrn4l\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775851 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-config-data\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.775881 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-log-httpd\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877583 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrn4l\" (UniqueName: \"kubernetes.io/projected/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-kube-api-access-rrn4l\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877638 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-config-data\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877684 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-log-httpd\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877725 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-run-httpd\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877743 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877847 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-scripts\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877865 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.877909 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.878827 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-run-httpd\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.879205 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-log-httpd\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.882463 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.882481 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.882684 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-scripts\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.884214 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.885462 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-config-data\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.900210 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrn4l\" (UniqueName: \"kubernetes.io/projected/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-kube-api-access-rrn4l\") pod \"ceilometer-0\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " pod="openstack/ceilometer-0" Sep 30 00:33:33 crc kubenswrapper[4809]: I0930 00:33:33.920562 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:33:34 crc kubenswrapper[4809]: I0930 00:33:34.489690 4809 generic.go:334] "Generic (PLEG): container finished" podID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerID="f41190c02a52f314afddc833ca0b3f0c17e12a3ff9107e29ef1ef16199706ae9" exitCode=0 Sep 30 00:33:34 crc kubenswrapper[4809]: I0930 00:33:34.489821 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerDied","Data":"f41190c02a52f314afddc833ca0b3f0c17e12a3ff9107e29ef1ef16199706ae9"} Sep 30 00:33:34 crc kubenswrapper[4809]: I0930 00:33:34.567548 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:33:35 crc kubenswrapper[4809]: I0930 00:33:35.558660 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerStarted","Data":"6dd4f5074ad20ab986161a066c78ce7cfd0c78eca82fc2ed64bf91b48eb31b8c"} Sep 30 00:33:36 crc kubenswrapper[4809]: I0930 00:33:36.193447 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" containerID="cri-o://1fb99b015248034a11488608f3491637cb199733b5aecc2399cc1298b976af4b" gracePeriod=604796 Sep 30 00:33:36 crc kubenswrapper[4809]: I0930 00:33:36.580020 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerStarted","Data":"b66073b6d12710a2497a7bec91216aec58b4a6052ac8b4b3aaae613ef6672a81"} Sep 30 00:33:37 crc kubenswrapper[4809]: I0930 00:33:37.132500 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" containerID="cri-o://6f5b6d99440ced337d2ecb0484994816df37c9fafccfc3fbb4ee0f9f01f0fef3" gracePeriod=604795 Sep 30 00:33:37 crc kubenswrapper[4809]: I0930 00:33:37.478253 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.120:5671: connect: connection refused" Sep 30 00:33:37 crc kubenswrapper[4809]: I0930 00:33:37.822979 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.121:5671: connect: connection refused" Sep 30 00:33:39 crc kubenswrapper[4809]: I0930 00:33:39.727396 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qx48m" podStartSLOduration=6.875132497 podStartE2EDuration="10.727378575s" podCreationTimestamp="2025-09-30 00:33:29 +0000 UTC" firstStartedPulling="2025-09-30 00:33:31.396793104 +0000 UTC m=+1462.433042512" lastFinishedPulling="2025-09-30 00:33:35.249039182 +0000 UTC m=+1466.285288590" observedRunningTime="2025-09-30 00:33:36.60758975 +0000 UTC m=+1467.643839158" watchObservedRunningTime="2025-09-30 00:33:39.727378575 +0000 UTC m=+1470.763627983" Sep 30 00:33:40 crc kubenswrapper[4809]: I0930 00:33:40.175908 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:40 crc kubenswrapper[4809]: I0930 00:33:40.176197 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:41 crc kubenswrapper[4809]: I0930 00:33:41.284363 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qx48m" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="registry-server" probeResult="failure" output=< Sep 30 00:33:41 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 00:33:41 crc kubenswrapper[4809]: > Sep 30 00:33:42 crc kubenswrapper[4809]: I0930 00:33:42.658606 4809 generic.go:334] "Generic (PLEG): container finished" podID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerID="1fb99b015248034a11488608f3491637cb199733b5aecc2399cc1298b976af4b" exitCode=0 Sep 30 00:33:42 crc kubenswrapper[4809]: I0930 00:33:42.658702 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d7616359-f18a-4fba-b35a-327e65a0c05d","Type":"ContainerDied","Data":"1fb99b015248034a11488608f3491637cb199733b5aecc2399cc1298b976af4b"} Sep 30 00:33:44 crc kubenswrapper[4809]: I0930 00:33:44.686756 4809 generic.go:334] "Generic (PLEG): container finished" podID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerID="6f5b6d99440ced337d2ecb0484994816df37c9fafccfc3fbb4ee0f9f01f0fef3" exitCode=0 Sep 30 00:33:44 crc kubenswrapper[4809]: I0930 00:33:44.686921 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"095a7ca7-bda1-498c-8a6d-16de67eb0a70","Type":"ContainerDied","Data":"6f5b6d99440ced337d2ecb0484994816df37c9fafccfc3fbb4ee0f9f01f0fef3"} Sep 30 00:33:47 crc kubenswrapper[4809]: I0930 00:33:47.478306 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.120:5671: connect: connection refused" Sep 30 00:33:47 crc kubenswrapper[4809]: I0930 00:33:47.822848 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.121:5671: connect: connection refused" Sep 30 00:33:50 crc kubenswrapper[4809]: I0930 00:33:50.237432 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:50 crc kubenswrapper[4809]: I0930 00:33:50.285603 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:50 crc kubenswrapper[4809]: I0930 00:33:50.474571 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qx48m"] Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.553923 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68df85789f-j6gch"] Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.561221 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.563467 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.571466 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68df85789f-j6gch"] Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.706996 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.707062 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkhzm\" (UniqueName: \"kubernetes.io/projected/14f049a6-d608-4fd3-84ac-4c264cdfde1f-kube-api-access-lkhzm\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.707143 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-sb\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.707169 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-svc\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.707230 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-nb\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.707258 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-swift-storage-0\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.707405 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-openstack-edpm-ipam\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.774159 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qx48m" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="registry-server" 
containerID="cri-o://b66073b6d12710a2497a7bec91216aec58b4a6052ac8b4b3aaae613ef6672a81" gracePeriod=2 Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.810914 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.811111 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.811195 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkhzm\" (UniqueName: \"kubernetes.io/projected/14f049a6-d608-4fd3-84ac-4c264cdfde1f-kube-api-access-lkhzm\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.812708 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-sb\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.828112 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-sb\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.830220 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-svc\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.830933 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-nb\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.830975 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-swift-storage-0\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.831040 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-openstack-edpm-ipam\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " 
pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.831254 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-svc\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.831960 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-nb\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.831987 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-openstack-edpm-ipam\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.832626 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-swift-storage-0\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.835981 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkhzm\" (UniqueName: \"kubernetes.io/projected/14f049a6-d608-4fd3-84ac-4c264cdfde1f-kube-api-access-lkhzm\") pod \"dnsmasq-dns-68df85789f-j6gch\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:51 crc kubenswrapper[4809]: I0930 00:33:51.933148 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:33:52 crc kubenswrapper[4809]: I0930 00:33:52.789166 4809 generic.go:334] "Generic (PLEG): container finished" podID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerID="b66073b6d12710a2497a7bec91216aec58b4a6052ac8b4b3aaae613ef6672a81" exitCode=0 Sep 30 00:33:52 crc kubenswrapper[4809]: I0930 00:33:52.789220 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerDied","Data":"b66073b6d12710a2497a7bec91216aec58b4a6052ac8b4b3aaae613ef6672a81"} Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.325820 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.326417 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.370289 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.380423 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.407788 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.407838 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6xh2\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-kube-api-access-d6xh2\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.407859 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-erlang-cookie\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.407895 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-config-data\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.407928 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/095a7ca7-bda1-498c-8a6d-16de67eb0a70-erlang-cookie-secret\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc 
kubenswrapper[4809]: I0930 00:33:55.407944 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-plugins\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.407960 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-plugins\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408007 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-erlang-cookie\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408035 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/095a7ca7-bda1-498c-8a6d-16de67eb0a70-pod-info\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408060 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-config-data\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408088 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d7616359-f18a-4fba-b35a-327e65a0c05d-pod-info\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408103 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-server-conf\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408122 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-tls\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408146 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-server-conf\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408178 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408194 4809 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x25bj\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-kube-api-access-x25bj\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408213 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-confd\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408233 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-tls\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408253 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-plugins-conf\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408332 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-confd\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408358 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d7616359-f18a-4fba-b35a-327e65a0c05d-erlang-cookie-secret\") pod \"d7616359-f18a-4fba-b35a-327e65a0c05d\" (UID: \"d7616359-f18a-4fba-b35a-327e65a0c05d\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.408383 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-plugins-conf\") pod \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\" (UID: \"095a7ca7-bda1-498c-8a6d-16de67eb0a70\") " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.413613 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.420781 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.424312 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.427390 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.428152 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.432581 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.434777 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-kube-api-access-d6xh2" (OuterVolumeSpecName: "kube-api-access-d6xh2") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "kube-api-access-d6xh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.436478 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.442942 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.444817 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.448906 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-kube-api-access-x25bj" (OuterVolumeSpecName: "kube-api-access-x25bj") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "kube-api-access-x25bj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.456108 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/095a7ca7-bda1-498c-8a6d-16de67eb0a70-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.478185 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7616359-f18a-4fba-b35a-327e65a0c05d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.501518 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511007 4809 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/095a7ca7-bda1-498c-8a6d-16de67eb0a70-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511040 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511050 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511089 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511103 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511134 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511148 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x25bj\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-kube-api-access-x25bj\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511161 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511171 4809 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511183 4809 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d7616359-f18a-4fba-b35a-327e65a0c05d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511192 4809 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511209 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511217 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6xh2\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-kube-api-access-d6xh2\") on node \"crc\" 
DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.511226 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.515500 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/095a7ca7-bda1-498c-8a6d-16de67eb0a70-pod-info" (OuterVolumeSpecName: "pod-info") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.525109 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d7616359-f18a-4fba-b35a-327e65a0c05d-pod-info" (OuterVolumeSpecName: "pod-info") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.581058 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.620237 4809 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/095a7ca7-bda1-498c-8a6d-16de67eb0a70-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.620277 4809 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d7616359-f18a-4fba-b35a-327e65a0c05d-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.620293 4809 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.625392 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-config-data" (OuterVolumeSpecName: "config-data") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.626038 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-config-data" (OuterVolumeSpecName: "config-data") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.666701 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-server-conf" (OuterVolumeSpecName: "server-conf") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.673808 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.679448 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-server-conf" (OuterVolumeSpecName: "server-conf") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.730052 4809 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.730087 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.730098 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.730107 4809 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d7616359-f18a-4fba-b35a-327e65a0c05d-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.730115 4809 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/095a7ca7-bda1-498c-8a6d-16de67eb0a70-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.787322 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d7616359-f18a-4fba-b35a-327e65a0c05d" (UID: "d7616359-f18a-4fba-b35a-327e65a0c05d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.789914 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "095a7ca7-bda1-498c-8a6d-16de67eb0a70" (UID: "095a7ca7-bda1-498c-8a6d-16de67eb0a70"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.830266 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.830260 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"095a7ca7-bda1-498c-8a6d-16de67eb0a70","Type":"ContainerDied","Data":"1dce1b12e97c7a8f03bf135e48c0d43730447bb5f5046719c62ff83fd7f50975"} Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.830725 4809 scope.go:117] "RemoveContainer" containerID="6f5b6d99440ced337d2ecb0484994816df37c9fafccfc3fbb4ee0f9f01f0fef3" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.833135 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/095a7ca7-bda1-498c-8a6d-16de67eb0a70-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.833163 4809 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d7616359-f18a-4fba-b35a-327e65a0c05d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.834875 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d7616359-f18a-4fba-b35a-327e65a0c05d","Type":"ContainerDied","Data":"0d604000bc6cf32c7c0e38df887ee431cdaae3de4e04c750bd69da96f802c6f3"} Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.834987 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.878707 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.905697 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.953870 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.971026 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985044 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:33:55 crc kubenswrapper[4809]: E0930 00:33:55.985431 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985447 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" Sep 30 00:33:55 crc kubenswrapper[4809]: E0930 00:33:55.985479 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="setup-container" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985487 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="setup-container" Sep 30 00:33:55 crc kubenswrapper[4809]: E0930 00:33:55.985501 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="setup-container" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985507 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="setup-container" Sep 30 00:33:55 crc 
kubenswrapper[4809]: E0930 00:33:55.985515 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985521 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985877 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" containerName="rabbitmq" Sep 30 00:33:55 crc kubenswrapper[4809]: I0930 00:33:55.985898 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" containerName="rabbitmq" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.002555 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.003993 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.004073 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.004427 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012137 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012163 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012170 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012197 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012293 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012462 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012848 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.012877 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.013293 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.013469 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.013471 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.020263 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.020855 4809 reflector.go:368] Caches populated 
for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-k8hm5" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.021335 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zh24w" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.021435 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142221 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-config-data\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142304 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/69e69cac-c659-4973-a220-82c222df7c35-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142397 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142434 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3e11f1c6-6535-453c-86b9-98c1ba7abd72-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142452 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkbgp\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-kube-api-access-kkbgp\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142520 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142590 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142741 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 
00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142775 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142829 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3e11f1c6-6535-453c-86b9-98c1ba7abd72-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142862 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142888 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142907 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142922 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142937 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142956 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.142981 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-server-conf\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 
00:33:56.143010 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/69e69cac-c659-4973-a220-82c222df7c35-pod-info\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.143047 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.143064 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.143082 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.143130 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-274hc\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-kube-api-access-274hc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.202802 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.202877 4809 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.203042 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5fdhcchb7hc5h58fh5d5hd4h54bhf9hcchc6h5c8h564h64dh646h6dhb6hf5h7fh4h566h686h55fh5fbhbh58chfdh6h586h54h94hb9q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rrn4l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244396 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244857 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244884 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244899 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244924 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3e11f1c6-6535-453c-86b9-98c1ba7abd72-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244947 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244971 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.244992 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245007 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245023 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245040 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245093 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-server-conf\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245122 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/69e69cac-c659-4973-a220-82c222df7c35-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245160 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245179 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245199 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245241 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-274hc\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-kube-api-access-274hc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245269 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-config-data\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245298 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/69e69cac-c659-4973-a220-82c222df7c35-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245314 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245332 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3e11f1c6-6535-453c-86b9-98c1ba7abd72-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245345 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkbgp\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-kube-api-access-kkbgp\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.245890 4809 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.251468 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.251563 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.252136 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.253202 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-server-conf\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.253438 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.253834 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.253922 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.254305 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.255112 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.255749 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69e69cac-c659-4973-a220-82c222df7c35-config-data\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.256096 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.258207 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.258236 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3e11f1c6-6535-453c-86b9-98c1ba7abd72-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.258474 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/69e69cac-c659-4973-a220-82c222df7c35-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.258504 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3e11f1c6-6535-453c-86b9-98c1ba7abd72-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.258922 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3e11f1c6-6535-453c-86b9-98c1ba7abd72-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.260119 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/69e69cac-c659-4973-a220-82c222df7c35-pod-info\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.260964 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.270752 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" 
(UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.271461 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkbgp\" (UniqueName: \"kubernetes.io/projected/69e69cac-c659-4973-a220-82c222df7c35-kube-api-access-kkbgp\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.282062 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-274hc\" (UniqueName: \"kubernetes.io/projected/3e11f1c6-6535-453c-86b9-98c1ba7abd72-kube-api-access-274hc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.308594 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3e11f1c6-6535-453c-86b9-98c1ba7abd72\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.337305 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"69e69cac-c659-4973-a220-82c222df7c35\") " pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.348576 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.639738 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.697773 4809 scope.go:117] "RemoveContainer" containerID="743f47fb70d0bb64c618e80220574e87e303504d35695209df55120b5be8211a" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.709692 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.709744 4809 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.709852 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:heat-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested,Command:[/bin/bash],Args:[-c /usr/bin/heat-manage --config-dir /etc/heat/heat.conf.d db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/heat/heat.conf.d/00-default.conf,SubPath:00-default.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/heat/heat.conf.d/01-custom.conf,SubPath:01-custom.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sc5l9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42418,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42418,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-db-sync-5gbcd_openstack(feb60974-e32b-4f1e-bcd2-2647c3dc05eb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.710926 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/heat-db-sync-5gbcd" podUID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.785193 4809 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.796608 4809 scope.go:117] "RemoveContainer" containerID="1fb99b015248034a11488608f3491637cb199733b5aecc2399cc1298b976af4b" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.883166 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qx48m" event={"ID":"9dda2bf4-170e-4605-a1fa-2cac07031e03","Type":"ContainerDied","Data":"d596cfa21c7deb89ead4ee7044e190ac21368c9793211728e7e5f0bfe965099e"} Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.883198 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qx48m" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.884312 4809 scope.go:117] "RemoveContainer" containerID="d069e4cc0bde9e03aa2c4113f1c7695e4fca7141970b029715da52511e304260" Sep 30 00:33:56 crc kubenswrapper[4809]: E0930 00:33:56.899937 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested\\\"\"" pod="openstack/heat-db-sync-5gbcd" podUID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.944040 4809 scope.go:117] "RemoveContainer" containerID="b66073b6d12710a2497a7bec91216aec58b4a6052ac8b4b3aaae613ef6672a81" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.965911 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-catalog-content\") pod \"9dda2bf4-170e-4605-a1fa-2cac07031e03\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.965983 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-utilities\") pod \"9dda2bf4-170e-4605-a1fa-2cac07031e03\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.966029 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nd2gn\" (UniqueName: \"kubernetes.io/projected/9dda2bf4-170e-4605-a1fa-2cac07031e03-kube-api-access-nd2gn\") pod \"9dda2bf4-170e-4605-a1fa-2cac07031e03\" (UID: \"9dda2bf4-170e-4605-a1fa-2cac07031e03\") " Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.967727 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-utilities" (OuterVolumeSpecName: "utilities") pod "9dda2bf4-170e-4605-a1fa-2cac07031e03" (UID: "9dda2bf4-170e-4605-a1fa-2cac07031e03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.972026 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dda2bf4-170e-4605-a1fa-2cac07031e03-kube-api-access-nd2gn" (OuterVolumeSpecName: "kube-api-access-nd2gn") pod "9dda2bf4-170e-4605-a1fa-2cac07031e03" (UID: "9dda2bf4-170e-4605-a1fa-2cac07031e03"). InnerVolumeSpecName "kube-api-access-nd2gn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:56 crc kubenswrapper[4809]: I0930 00:33:56.985739 4809 scope.go:117] "RemoveContainer" containerID="f41190c02a52f314afddc833ca0b3f0c17e12a3ff9107e29ef1ef16199706ae9" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.007598 4809 scope.go:117] "RemoveContainer" containerID="b6e929064a09964da3df1b0423b9717f24483472e6209194896fc981b18561d6" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.011994 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9dda2bf4-170e-4605-a1fa-2cac07031e03" (UID: "9dda2bf4-170e-4605-a1fa-2cac07031e03"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.092593 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.092656 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dda2bf4-170e-4605-a1fa-2cac07031e03-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.092671 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nd2gn\" (UniqueName: \"kubernetes.io/projected/9dda2bf4-170e-4605-a1fa-2cac07031e03-kube-api-access-nd2gn\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.252279 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68df85789f-j6gch"] Sep 30 00:33:57 crc kubenswrapper[4809]: W0930 00:33:57.263795 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f049a6_d608_4fd3_84ac_4c264cdfde1f.slice/crio-9c58ee8013bf0301df3adffaeb2b1772bbe20a7ad8047633c848af0ce7b8599e WatchSource:0}: Error finding container 9c58ee8013bf0301df3adffaeb2b1772bbe20a7ad8047633c848af0ce7b8599e: Status 404 returned error can't find the container with id 9c58ee8013bf0301df3adffaeb2b1772bbe20a7ad8047633c848af0ce7b8599e Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.397976 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.433636 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.573377 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qx48m"] Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.585203 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qx48m"] Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.703142 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="095a7ca7-bda1-498c-8a6d-16de67eb0a70" path="/var/lib/kubelet/pods/095a7ca7-bda1-498c-8a6d-16de67eb0a70/volumes" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.704970 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" path="/var/lib/kubelet/pods/9dda2bf4-170e-4605-a1fa-2cac07031e03/volumes" Sep 30 00:33:57 crc 
kubenswrapper[4809]: I0930 00:33:57.706487 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7616359-f18a-4fba-b35a-327e65a0c05d" path="/var/lib/kubelet/pods/d7616359-f18a-4fba-b35a-327e65a0c05d/volumes" Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.902437 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3e11f1c6-6535-453c-86b9-98c1ba7abd72","Type":"ContainerStarted","Data":"8f8bc09b7562f9a2d13b939d1ac956ee19710947ea79958bff186d81c18112ba"} Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.902481 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3e11f1c6-6535-453c-86b9-98c1ba7abd72","Type":"ContainerStarted","Data":"932227bbeac2edadcaafb982a7b77e381b59af0ace458bf200b8ec0c30a9aac9"} Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.905804 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerStarted","Data":"eee190d73383e3a230fa183925148a18e1280eb0d5e3876c8dda27b8b20bb53c"} Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.909344 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"69e69cac-c659-4973-a220-82c222df7c35","Type":"ContainerStarted","Data":"57fe40d601c9d9899ecd694241d63149e4b7fd73f713fff47cac69618cdf3664"} Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.909380 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"69e69cac-c659-4973-a220-82c222df7c35","Type":"ContainerStarted","Data":"f97b2b62725e22439251badef4dce9a710b9cd08a7abef32d1f7a2279a16131e"} Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.911100 4809 generic.go:334] "Generic (PLEG): container finished" podID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerID="3a4b4929ab4169b178cb83c37f5c763049a009176fe604c3a885772dba89b93e" exitCode=0 Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.911154 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68df85789f-j6gch" event={"ID":"14f049a6-d608-4fd3-84ac-4c264cdfde1f","Type":"ContainerDied","Data":"3a4b4929ab4169b178cb83c37f5c763049a009176fe604c3a885772dba89b93e"} Sep 30 00:33:57 crc kubenswrapper[4809]: I0930 00:33:57.911183 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68df85789f-j6gch" event={"ID":"14f049a6-d608-4fd3-84ac-4c264cdfde1f","Type":"ContainerStarted","Data":"9c58ee8013bf0301df3adffaeb2b1772bbe20a7ad8047633c848af0ce7b8599e"} Sep 30 00:33:58 crc kubenswrapper[4809]: I0930 00:33:58.923496 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerStarted","Data":"ad2081442abe517315210f46018d2b78c6913ad8da346ada4518872d65c3f160"} Sep 30 00:33:58 crc kubenswrapper[4809]: I0930 00:33:58.925888 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68df85789f-j6gch" event={"ID":"14f049a6-d608-4fd3-84ac-4c264cdfde1f","Type":"ContainerStarted","Data":"30aa4813f189755669b0bb66abe79f8f88dd01c806f999b32bc304b3e82b46ff"} Sep 30 00:33:58 crc kubenswrapper[4809]: I0930 00:33:58.951076 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68df85789f-j6gch" podStartSLOduration=7.951052266 podStartE2EDuration="7.951052266s" podCreationTimestamp="2025-09-30 00:33:51 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:33:58.945080614 +0000 UTC m=+1489.981330022" watchObservedRunningTime="2025-09-30 00:33:58.951052266 +0000 UTC m=+1489.987301684" Sep 30 00:33:59 crc kubenswrapper[4809]: I0930 00:33:59.937244 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:34:00 crc kubenswrapper[4809]: E0930 00:34:00.199401 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" Sep 30 00:34:00 crc kubenswrapper[4809]: I0930 00:34:00.951167 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerStarted","Data":"fc66edf39074ad29033c8ec24e5eb248910800f641f53339f96195a8d7626611"} Sep 30 00:34:00 crc kubenswrapper[4809]: E0930 00:34:00.953689 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" Sep 30 00:34:01 crc kubenswrapper[4809]: I0930 00:34:01.965708 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:34:01 crc kubenswrapper[4809]: E0930 00:34:01.974089 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" Sep 30 00:34:02 crc kubenswrapper[4809]: E0930 00:34:02.981699 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" Sep 30 00:34:06 crc kubenswrapper[4809]: I0930 00:34:06.934866 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.073986 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79b5d74c8c-r2648"] Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.074289 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerName="dnsmasq-dns" containerID="cri-o://561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629" gracePeriod=10 Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.224331 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-768b698657-rwt8x"] Sep 30 00:34:07 crc kubenswrapper[4809]: E0930 00:34:07.224813 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" 
containerName="extract-content" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.224827 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="extract-content" Sep 30 00:34:07 crc kubenswrapper[4809]: E0930 00:34:07.224840 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="extract-utilities" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.224846 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="extract-utilities" Sep 30 00:34:07 crc kubenswrapper[4809]: E0930 00:34:07.224855 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="registry-server" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.224862 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="registry-server" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.225098 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dda2bf4-170e-4605-a1fa-2cac07031e03" containerName="registry-server" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.226206 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.233400 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-768b698657-rwt8x"] Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332320 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-sb\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332807 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-config\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332837 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-nb\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332868 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-swift-storage-0\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332892 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-svc\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " 
pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332915 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm7mq\" (UniqueName: \"kubernetes.io/projected/207213cd-fc29-4597-936a-e8f332106ee7-kube-api-access-mm7mq\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.332936 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-openstack-edpm-ipam\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.434686 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-nb\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.434753 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-swift-storage-0\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.434779 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-svc\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.434801 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm7mq\" (UniqueName: \"kubernetes.io/projected/207213cd-fc29-4597-936a-e8f332106ee7-kube-api-access-mm7mq\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.434822 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-openstack-edpm-ipam\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.434924 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-sb\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.435008 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-config\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: 
\"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.435929 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-config\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.436549 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-nb\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.441156 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-swift-storage-0\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.441302 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-svc\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.441487 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-openstack-edpm-ipam\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.441767 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-sb\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.463339 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm7mq\" (UniqueName: \"kubernetes.io/projected/207213cd-fc29-4597-936a-e8f332106ee7-kube-api-access-mm7mq\") pod \"dnsmasq-dns-768b698657-rwt8x\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.582415 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.769115 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.944283 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7fvg\" (UniqueName: \"kubernetes.io/projected/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-kube-api-access-v7fvg\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.944402 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-swift-storage-0\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.944456 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-svc\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.944484 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-nb\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.944590 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-config\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.944613 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:07 crc kubenswrapper[4809]: I0930 00:34:07.955859 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-kube-api-access-v7fvg" (OuterVolumeSpecName: "kube-api-access-v7fvg") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "kube-api-access-v7fvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.029403 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.047139 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7fvg\" (UniqueName: \"kubernetes.io/projected/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-kube-api-access-v7fvg\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.047162 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.048008 4809 generic.go:334] "Generic (PLEG): container finished" podID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerID="561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629" exitCode=0 Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.048042 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" event={"ID":"9709db2d-cb05-4c93-bf1f-ab235ff99e4b","Type":"ContainerDied","Data":"561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629"} Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.048068 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" event={"ID":"9709db2d-cb05-4c93-bf1f-ab235ff99e4b","Type":"ContainerDied","Data":"da3b0178874f87570e157a36f4a5dac102578fb6611f49fd0a55ce4fdd187c22"} Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.048085 4809 scope.go:117] "RemoveContainer" containerID="561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.048198 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79b5d74c8c-r2648" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.054019 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-config" (OuterVolumeSpecName: "config") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.070257 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.082297 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.148636 4809 scope.go:117] "RemoveContainer" containerID="4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.149913 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.150216 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb\") pod \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\" (UID: \"9709db2d-cb05-4c93-bf1f-ab235ff99e4b\") " Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.150877 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.150915 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:08 crc kubenswrapper[4809]: W0930 00:34:08.150915 4809 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/9709db2d-cb05-4c93-bf1f-ab235ff99e4b/volumes/kubernetes.io~configmap/ovsdbserver-sb Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.150926 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.150933 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9709db2d-cb05-4c93-bf1f-ab235ff99e4b" (UID: "9709db2d-cb05-4c93-bf1f-ab235ff99e4b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.184814 4809 scope.go:117] "RemoveContainer" containerID="561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629" Sep 30 00:34:08 crc kubenswrapper[4809]: E0930 00:34:08.185785 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629\": container with ID starting with 561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629 not found: ID does not exist" containerID="561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.185827 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629"} err="failed to get container status \"561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629\": rpc error: code = NotFound desc = could not find container \"561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629\": container with ID starting with 561b3ad42234e5771ab0886c010aafba6e6392be16e1e13c7421bb46cd3b4629 not found: ID does not exist" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.185851 4809 scope.go:117] "RemoveContainer" containerID="4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb" Sep 30 00:34:08 crc kubenswrapper[4809]: E0930 00:34:08.188695 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb\": container with ID starting with 4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb not found: ID does not exist" containerID="4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.188731 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb"} err="failed to get container status \"4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb\": rpc error: code = NotFound desc = could not find container \"4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb\": container with ID starting with 4b7034503237629d2ae043f5dfdda32216255d2f333237e70a60cf2700c877fb not found: ID does not exist" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.221724 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-768b698657-rwt8x"] Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.253808 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9709db2d-cb05-4c93-bf1f-ab235ff99e4b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.421488 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79b5d74c8c-r2648"] Sep 30 00:34:08 crc kubenswrapper[4809]: I0930 00:34:08.436186 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79b5d74c8c-r2648"] Sep 30 00:34:09 crc kubenswrapper[4809]: I0930 00:34:09.061937 4809 generic.go:334] "Generic (PLEG): container finished" podID="207213cd-fc29-4597-936a-e8f332106ee7" containerID="7c02d1996d01e573adbf429c70debdb892204128d1239b0286342b8a6ee46524" exitCode=0 Sep 30 
00:34:09 crc kubenswrapper[4809]: I0930 00:34:09.061972 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-768b698657-rwt8x" event={"ID":"207213cd-fc29-4597-936a-e8f332106ee7","Type":"ContainerDied","Data":"7c02d1996d01e573adbf429c70debdb892204128d1239b0286342b8a6ee46524"} Sep 30 00:34:09 crc kubenswrapper[4809]: I0930 00:34:09.061994 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-768b698657-rwt8x" event={"ID":"207213cd-fc29-4597-936a-e8f332106ee7","Type":"ContainerStarted","Data":"ddba901eb47490e6feb9081cb28b1b74ee46f8684154c88730b21506046abf3a"} Sep 30 00:34:09 crc kubenswrapper[4809]: I0930 00:34:09.719792 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" path="/var/lib/kubelet/pods/9709db2d-cb05-4c93-bf1f-ab235ff99e4b/volumes" Sep 30 00:34:10 crc kubenswrapper[4809]: I0930 00:34:10.082850 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-768b698657-rwt8x" event={"ID":"207213cd-fc29-4597-936a-e8f332106ee7","Type":"ContainerStarted","Data":"64f265b9f3640b0bf65dd37eccf84f09e55dac5818dd9b8ed55f62ebe4a80b11"} Sep 30 00:34:10 crc kubenswrapper[4809]: I0930 00:34:10.084184 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:10 crc kubenswrapper[4809]: I0930 00:34:10.108894 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-768b698657-rwt8x" podStartSLOduration=3.108873234 podStartE2EDuration="3.108873234s" podCreationTimestamp="2025-09-30 00:34:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:34:10.102064039 +0000 UTC m=+1501.138313457" watchObservedRunningTime="2025-09-30 00:34:10.108873234 +0000 UTC m=+1501.145122642" Sep 30 00:34:11 crc kubenswrapper[4809]: I0930 00:34:11.099177 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-5gbcd" event={"ID":"feb60974-e32b-4f1e-bcd2-2647c3dc05eb","Type":"ContainerStarted","Data":"131483e9532964c02cdb7ff8df9f7c01820fb295dc92898e51e714d50a0abbbf"} Sep 30 00:34:11 crc kubenswrapper[4809]: I0930 00:34:11.127739 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-5gbcd" podStartSLOduration=2.456981596 podStartE2EDuration="43.127720536s" podCreationTimestamp="2025-09-30 00:33:28 +0000 UTC" firstStartedPulling="2025-09-30 00:33:29.237820765 +0000 UTC m=+1460.274070163" lastFinishedPulling="2025-09-30 00:34:09.908559695 +0000 UTC m=+1500.944809103" observedRunningTime="2025-09-30 00:34:11.118379013 +0000 UTC m=+1502.154628441" watchObservedRunningTime="2025-09-30 00:34:11.127720536 +0000 UTC m=+1502.163969944" Sep 30 00:34:12 crc kubenswrapper[4809]: I0930 00:34:12.114762 4809 generic.go:334] "Generic (PLEG): container finished" podID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" containerID="131483e9532964c02cdb7ff8df9f7c01820fb295dc92898e51e714d50a0abbbf" exitCode=0 Sep 30 00:34:12 crc kubenswrapper[4809]: I0930 00:34:12.114877 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-5gbcd" event={"ID":"feb60974-e32b-4f1e-bcd2-2647c3dc05eb","Type":"ContainerDied","Data":"131483e9532964c02cdb7ff8df9f7c01820fb295dc92898e51e714d50a0abbbf"} Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.671215 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-5gbcd" Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.769322 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-config-data\") pod \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.769407 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-combined-ca-bundle\") pod \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.769488 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sc5l9\" (UniqueName: \"kubernetes.io/projected/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-kube-api-access-sc5l9\") pod \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\" (UID: \"feb60974-e32b-4f1e-bcd2-2647c3dc05eb\") " Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.778477 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-kube-api-access-sc5l9" (OuterVolumeSpecName: "kube-api-access-sc5l9") pod "feb60974-e32b-4f1e-bcd2-2647c3dc05eb" (UID: "feb60974-e32b-4f1e-bcd2-2647c3dc05eb"). InnerVolumeSpecName "kube-api-access-sc5l9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.807029 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feb60974-e32b-4f1e-bcd2-2647c3dc05eb" (UID: "feb60974-e32b-4f1e-bcd2-2647c3dc05eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.861889 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-config-data" (OuterVolumeSpecName: "config-data") pod "feb60974-e32b-4f1e-bcd2-2647c3dc05eb" (UID: "feb60974-e32b-4f1e-bcd2-2647c3dc05eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.873852 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.873887 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:13 crc kubenswrapper[4809]: I0930 00:34:13.873903 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sc5l9\" (UniqueName: \"kubernetes.io/projected/feb60974-e32b-4f1e-bcd2-2647c3dc05eb-kube-api-access-sc5l9\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:14 crc kubenswrapper[4809]: I0930 00:34:14.144483 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-5gbcd" event={"ID":"feb60974-e32b-4f1e-bcd2-2647c3dc05eb","Type":"ContainerDied","Data":"0242797c6cbaca92baeaae28519c51c4214cb1b60c24ddd537dc2532080ad30f"} Sep 30 00:34:14 crc kubenswrapper[4809]: I0930 00:34:14.144526 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0242797c6cbaca92baeaae28519c51c4214cb1b60c24ddd537dc2532080ad30f" Sep 30 00:34:14 crc kubenswrapper[4809]: I0930 00:34:14.144531 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-5gbcd" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.114744 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6b6bcf86f-4vgsr"] Sep 30 00:34:15 crc kubenswrapper[4809]: E0930 00:34:15.115823 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerName="dnsmasq-dns" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.115841 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerName="dnsmasq-dns" Sep 30 00:34:15 crc kubenswrapper[4809]: E0930 00:34:15.115875 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerName="init" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.115886 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerName="init" Sep 30 00:34:15 crc kubenswrapper[4809]: E0930 00:34:15.115915 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" containerName="heat-db-sync" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.115928 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" containerName="heat-db-sync" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.116266 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9709db2d-cb05-4c93-bf1f-ab235ff99e4b" containerName="dnsmasq-dns" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.116318 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" containerName="heat-db-sync" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.117449 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.127695 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6b6bcf86f-4vgsr"] Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.144705 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-86f457b754-mllm6"] Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.146480 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.185441 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-86f457b754-mllm6"] Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.210030 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6786b5c967-grl5d"] Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211425 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211440 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-config-data-custom\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211506 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsks4\" (UniqueName: \"kubernetes.io/projected/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-kube-api-access-zsks4\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211533 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-564vn\" (UniqueName: \"kubernetes.io/projected/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-kube-api-access-564vn\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211576 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-config-data\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211603 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-config-data-custom\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211669 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-combined-ca-bundle\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc 
kubenswrapper[4809]: I0930 00:34:15.211702 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-internal-tls-certs\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211741 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-combined-ca-bundle\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211831 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-public-tls-certs\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.211854 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-config-data\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.249732 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6786b5c967-grl5d"] Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313044 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-combined-ca-bundle\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313098 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-internal-tls-certs\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313133 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-combined-ca-bundle\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313184 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-internal-tls-certs\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313223 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-config-data-custom\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313247 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-public-tls-certs\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313269 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-public-tls-certs\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313288 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-config-data\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313307 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-combined-ca-bundle\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313328 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-config-data-custom\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313353 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-config-data\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313382 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsks4\" (UniqueName: \"kubernetes.io/projected/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-kube-api-access-zsks4\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313406 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-564vn\" (UniqueName: \"kubernetes.io/projected/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-kube-api-access-564vn\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313439 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-config-data\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313466 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-config-data-custom\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.313489 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dph58\" (UniqueName: \"kubernetes.io/projected/60c21ffa-346c-4c88-a449-a8b99906421e-kube-api-access-dph58\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.318196 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-combined-ca-bundle\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.318634 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-internal-tls-certs\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.318887 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-config-data-custom\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.319450 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-public-tls-certs\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.321210 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-combined-ca-bundle\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.324576 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-config-data\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.333320 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-564vn\" (UniqueName: 
\"kubernetes.io/projected/f0702a54-83ed-4e06-a9f6-91fd8c106cf4-kube-api-access-564vn\") pod \"heat-api-86f457b754-mllm6\" (UID: \"f0702a54-83ed-4e06-a9f6-91fd8c106cf4\") " pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.333754 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-config-data-custom\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.335196 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsks4\" (UniqueName: \"kubernetes.io/projected/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-kube-api-access-zsks4\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.337976 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c2cf376-aff0-4120-b4bd-3b3e4076d5e9-config-data\") pod \"heat-engine-6b6bcf86f-4vgsr\" (UID: \"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9\") " pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.415662 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-config-data-custom\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.416062 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-public-tls-certs\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.416165 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-combined-ca-bundle\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.416252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-config-data\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.416387 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dph58\" (UniqueName: \"kubernetes.io/projected/60c21ffa-346c-4c88-a449-a8b99906421e-kube-api-access-dph58\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.416520 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-internal-tls-certs\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.420600 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-public-tls-certs\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.421420 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-config-data-custom\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.422787 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-config-data\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.425628 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-internal-tls-certs\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.428040 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c21ffa-346c-4c88-a449-a8b99906421e-combined-ca-bundle\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.434349 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dph58\" (UniqueName: \"kubernetes.io/projected/60c21ffa-346c-4c88-a449-a8b99906421e-kube-api-access-dph58\") pod \"heat-cfnapi-6786b5c967-grl5d\" (UID: \"60c21ffa-346c-4c88-a449-a8b99906421e\") " pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.436549 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.483608 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:15 crc kubenswrapper[4809]: I0930 00:34:15.531937 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.033105 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6b6bcf86f-4vgsr"] Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.138338 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-86f457b754-mllm6"] Sep 30 00:34:16 crc kubenswrapper[4809]: W0930 00:34:16.146208 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0702a54_83ed_4e06_a9f6_91fd8c106cf4.slice/crio-358271c27566aaedcb6e098e7d4792490f5c3ea2f9260ea3faba7b03f85b4e6d WatchSource:0}: Error finding container 358271c27566aaedcb6e098e7d4792490f5c3ea2f9260ea3faba7b03f85b4e6d: Status 404 returned error can't find the container with id 358271c27566aaedcb6e098e7d4792490f5c3ea2f9260ea3faba7b03f85b4e6d Sep 30 00:34:16 crc kubenswrapper[4809]: W0930 00:34:16.146739 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60c21ffa_346c_4c88_a449_a8b99906421e.slice/crio-ef80c72796d614c9b1f325effc0b02d871b4c6b03b21f539f6a9f43d0a611db3 WatchSource:0}: Error finding container ef80c72796d614c9b1f325effc0b02d871b4c6b03b21f539f6a9f43d0a611db3: Status 404 returned error can't find the container with id ef80c72796d614c9b1f325effc0b02d871b4c6b03b21f539f6a9f43d0a611db3 Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.148249 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6786b5c967-grl5d"] Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.192566 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-86f457b754-mllm6" event={"ID":"f0702a54-83ed-4e06-a9f6-91fd8c106cf4","Type":"ContainerStarted","Data":"358271c27566aaedcb6e098e7d4792490f5c3ea2f9260ea3faba7b03f85b4e6d"} Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.194110 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6786b5c967-grl5d" event={"ID":"60c21ffa-346c-4c88-a449-a8b99906421e","Type":"ContainerStarted","Data":"ef80c72796d614c9b1f325effc0b02d871b4c6b03b21f539f6a9f43d0a611db3"} Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.195365 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6b6bcf86f-4vgsr" event={"ID":"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9","Type":"ContainerStarted","Data":"258a44517c49faf8f7d0023c0b7a4012892a137152db31d9948377f58be3d5e8"} Sep 30 00:34:16 crc kubenswrapper[4809]: I0930 00:34:16.848888 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:34:17 crc kubenswrapper[4809]: I0930 00:34:17.218483 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6b6bcf86f-4vgsr" event={"ID":"2c2cf376-aff0-4120-b4bd-3b3e4076d5e9","Type":"ContainerStarted","Data":"af54558994ee5689ff8527af704023362a51cc40b6da600286504e21a9b29622"} Sep 30 00:34:17 crc kubenswrapper[4809]: I0930 00:34:17.218945 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:17 crc kubenswrapper[4809]: I0930 00:34:17.246369 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6b6bcf86f-4vgsr" podStartSLOduration=2.24633896 podStartE2EDuration="2.24633896s" podCreationTimestamp="2025-09-30 00:34:15 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:34:17.236390802 +0000 UTC m=+1508.272640220" watchObservedRunningTime="2025-09-30 00:34:17.24633896 +0000 UTC m=+1508.282588368" Sep 30 00:34:17 crc kubenswrapper[4809]: I0930 00:34:17.584823 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 00:34:17 crc kubenswrapper[4809]: I0930 00:34:17.649818 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68df85789f-j6gch"] Sep 30 00:34:17 crc kubenswrapper[4809]: I0930 00:34:17.650049 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68df85789f-j6gch" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerName="dnsmasq-dns" containerID="cri-o://30aa4813f189755669b0bb66abe79f8f88dd01c806f999b32bc304b3e82b46ff" gracePeriod=10 Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.239224 4809 generic.go:334] "Generic (PLEG): container finished" podID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerID="30aa4813f189755669b0bb66abe79f8f88dd01c806f999b32bc304b3e82b46ff" exitCode=0 Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.241288 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68df85789f-j6gch" event={"ID":"14f049a6-d608-4fd3-84ac-4c264cdfde1f","Type":"ContainerDied","Data":"30aa4813f189755669b0bb66abe79f8f88dd01c806f999b32bc304b3e82b46ff"} Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.345206 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.506483 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.506533 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-openstack-edpm-ipam\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.506631 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-swift-storage-0\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.506703 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkhzm\" (UniqueName: \"kubernetes.io/projected/14f049a6-d608-4fd3-84ac-4c264cdfde1f-kube-api-access-lkhzm\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.506782 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-nb\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc 
kubenswrapper[4809]: I0930 00:34:18.506884 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-sb\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.506917 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-svc\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.511965 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f049a6-d608-4fd3-84ac-4c264cdfde1f-kube-api-access-lkhzm" (OuterVolumeSpecName: "kube-api-access-lkhzm") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "kube-api-access-lkhzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.570402 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.577087 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.585259 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:18 crc kubenswrapper[4809]: E0930 00:34:18.592970 4809 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config podName:14f049a6-d608-4fd3-84ac-4c264cdfde1f nodeName:}" failed. No retries permitted until 2025-09-30 00:34:19.092943229 +0000 UTC m=+1510.129192637 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f") : error deleting /var/lib/kubelet/pods/14f049a6-d608-4fd3-84ac-4c264cdfde1f/volume-subpaths: remove /var/lib/kubelet/pods/14f049a6-d608-4fd3-84ac-4c264cdfde1f/volume-subpaths: no such file or directory Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.593122 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.593158 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: "14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.608976 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.609174 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkhzm\" (UniqueName: \"kubernetes.io/projected/14f049a6-d608-4fd3-84ac-4c264cdfde1f-kube-api-access-lkhzm\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.609237 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.609292 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.609367 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:18 crc kubenswrapper[4809]: I0930 00:34:18.609431 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.118491 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config\") pod \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\" (UID: \"14f049a6-d608-4fd3-84ac-4c264cdfde1f\") " Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.119098 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config" (OuterVolumeSpecName: "config") pod "14f049a6-d608-4fd3-84ac-4c264cdfde1f" (UID: 
"14f049a6-d608-4fd3-84ac-4c264cdfde1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.119472 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f049a6-d608-4fd3-84ac-4c264cdfde1f-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.253538 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6786b5c967-grl5d" event={"ID":"60c21ffa-346c-4c88-a449-a8b99906421e","Type":"ContainerStarted","Data":"f0d79d81f70ea3d5ec3af0a6b652d789f386b3509615c3d1547d87593696a62c"} Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.253756 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.255433 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68df85789f-j6gch" event={"ID":"14f049a6-d608-4fd3-84ac-4c264cdfde1f","Type":"ContainerDied","Data":"9c58ee8013bf0301df3adffaeb2b1772bbe20a7ad8047633c848af0ce7b8599e"} Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.255507 4809 scope.go:117] "RemoveContainer" containerID="30aa4813f189755669b0bb66abe79f8f88dd01c806f999b32bc304b3e82b46ff" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.255534 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68df85789f-j6gch" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.267316 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerStarted","Data":"3ae14c17879d86886ed66292d338f448fc8841c116f99b7f9f5fe79afce22874"} Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.270924 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-86f457b754-mllm6" event={"ID":"f0702a54-83ed-4e06-a9f6-91fd8c106cf4","Type":"ContainerStarted","Data":"6e4daba3f0c08875319118082a7588feecfeaa7342e14a116fc0e918bce8a55b"} Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.271079 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.274553 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6786b5c967-grl5d" podStartSLOduration=2.479650741 podStartE2EDuration="4.274529508s" podCreationTimestamp="2025-09-30 00:34:15 +0000 UTC" firstStartedPulling="2025-09-30 00:34:16.153727332 +0000 UTC m=+1507.189976740" lastFinishedPulling="2025-09-30 00:34:17.948606099 +0000 UTC m=+1508.984855507" observedRunningTime="2025-09-30 00:34:19.27128812 +0000 UTC m=+1510.307537568" watchObservedRunningTime="2025-09-30 00:34:19.274529508 +0000 UTC m=+1510.310778916" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.296250 4809 scope.go:117] "RemoveContainer" containerID="3a4b4929ab4169b178cb83c37f5c763049a009176fe604c3a885772dba89b93e" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.358182 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-86f457b754-mllm6" podStartSLOduration=2.558741571 podStartE2EDuration="4.35815914s" podCreationTimestamp="2025-09-30 00:34:15 +0000 UTC" firstStartedPulling="2025-09-30 00:34:16.149298283 +0000 UTC m=+1507.185547691" lastFinishedPulling="2025-09-30 
00:34:17.948715852 +0000 UTC m=+1508.984965260" observedRunningTime="2025-09-30 00:34:19.300774608 +0000 UTC m=+1510.337024016" watchObservedRunningTime="2025-09-30 00:34:19.35815914 +0000 UTC m=+1510.394408548" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.370584 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.9886241890000003 podStartE2EDuration="46.370563477s" podCreationTimestamp="2025-09-30 00:33:33 +0000 UTC" firstStartedPulling="2025-09-30 00:33:34.579067467 +0000 UTC m=+1465.615316875" lastFinishedPulling="2025-09-30 00:34:17.961006755 +0000 UTC m=+1508.997256163" observedRunningTime="2025-09-30 00:34:19.343992757 +0000 UTC m=+1510.380242165" watchObservedRunningTime="2025-09-30 00:34:19.370563477 +0000 UTC m=+1510.406812885" Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.388635 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68df85789f-j6gch"] Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.398133 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68df85789f-j6gch"] Sep 30 00:34:19 crc kubenswrapper[4809]: I0930 00:34:19.708467 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" path="/var/lib/kubelet/pods/14f049a6-d608-4fd3-84ac-4c264cdfde1f/volumes" Sep 30 00:34:25 crc kubenswrapper[4809]: I0930 00:34:25.325314 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:34:25 crc kubenswrapper[4809]: I0930 00:34:25.325744 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:34:26 crc kubenswrapper[4809]: I0930 00:34:26.778873 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-86f457b754-mllm6" Sep 30 00:34:26 crc kubenswrapper[4809]: I0930 00:34:26.861051 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-c5d74679-msnb4"] Sep 30 00:34:26 crc kubenswrapper[4809]: I0930 00:34:26.861514 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-c5d74679-msnb4" podUID="b84f683b-2b54-4f11-a13a-104543f646a3" containerName="heat-api" containerID="cri-o://15c9ed5312385bab468671de140eaa891467369fef5ecfb341ed0f0bbbf52552" gracePeriod=60 Sep 30 00:34:27 crc kubenswrapper[4809]: I0930 00:34:27.029544 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-6786b5c967-grl5d" Sep 30 00:34:27 crc kubenswrapper[4809]: I0930 00:34:27.115443 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-75b57bc64b-sfpzl"] Sep 30 00:34:27 crc kubenswrapper[4809]: I0930 00:34:27.115720 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" podUID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" containerName="heat-cfnapi" containerID="cri-o://28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722" gracePeriod=60 Sep 
30 00:34:28 crc kubenswrapper[4809]: I0930 00:34:28.377744 4809 generic.go:334] "Generic (PLEG): container finished" podID="69e69cac-c659-4973-a220-82c222df7c35" containerID="57fe40d601c9d9899ecd694241d63149e4b7fd73f713fff47cac69618cdf3664" exitCode=0 Sep 30 00:34:28 crc kubenswrapper[4809]: I0930 00:34:28.377803 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"69e69cac-c659-4973-a220-82c222df7c35","Type":"ContainerDied","Data":"57fe40d601c9d9899ecd694241d63149e4b7fd73f713fff47cac69618cdf3664"} Sep 30 00:34:28 crc kubenswrapper[4809]: I0930 00:34:28.380764 4809 generic.go:334] "Generic (PLEG): container finished" podID="3e11f1c6-6535-453c-86b9-98c1ba7abd72" containerID="8f8bc09b7562f9a2d13b939d1ac956ee19710947ea79958bff186d81c18112ba" exitCode=0 Sep 30 00:34:28 crc kubenswrapper[4809]: I0930 00:34:28.380800 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3e11f1c6-6535-453c-86b9-98c1ba7abd72","Type":"ContainerDied","Data":"8f8bc09b7562f9a2d13b939d1ac956ee19710947ea79958bff186d81c18112ba"} Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.344799 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j"] Sep 30 00:34:29 crc kubenswrapper[4809]: E0930 00:34:29.345612 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerName="init" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.345631 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerName="init" Sep 30 00:34:29 crc kubenswrapper[4809]: E0930 00:34:29.345690 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerName="dnsmasq-dns" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.345700 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerName="dnsmasq-dns" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.345985 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="14f049a6-d608-4fd3-84ac-4c264cdfde1f" containerName="dnsmasq-dns" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.346927 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.354740 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.354871 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.355102 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.355733 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.394539 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3e11f1c6-6535-453c-86b9-98c1ba7abd72","Type":"ContainerStarted","Data":"a3a339b0c14727d4016974d919760f2e7b8f9907cb218c73f0e7bd13f65d38e1"} Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.394796 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.397073 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"69e69cac-c659-4973-a220-82c222df7c35","Type":"ContainerStarted","Data":"3fd20aa5899842972c52ab7dd4a11f3aa28da64978375024b8aa0d05f2779103"} Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.397981 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.416401 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.416379511 podStartE2EDuration="34.416379511s" podCreationTimestamp="2025-09-30 00:33:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:34:29.414601763 +0000 UTC m=+1520.450851171" watchObservedRunningTime="2025-09-30 00:34:29.416379511 +0000 UTC m=+1520.452628919" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.443354 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j"] Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.450337 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.450383 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.450562 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.450701 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l22ph\" (UniqueName: \"kubernetes.io/projected/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-kube-api-access-l22ph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.450874 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.450853914 podStartE2EDuration="34.450853914s" podCreationTimestamp="2025-09-30 00:33:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:34:29.443947527 +0000 UTC m=+1520.480196925" watchObservedRunningTime="2025-09-30 00:34:29.450853914 +0000 UTC m=+1520.487103322" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.552209 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l22ph\" (UniqueName: \"kubernetes.io/projected/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-kube-api-access-l22ph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.552527 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.552557 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.552640 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.558163 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 
30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.558446 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.559874 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.582041 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l22ph\" (UniqueName: \"kubernetes.io/projected/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-kube-api-access-l22ph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:29 crc kubenswrapper[4809]: I0930 00:34:29.672257 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.416146 4809 generic.go:334] "Generic (PLEG): container finished" podID="b84f683b-2b54-4f11-a13a-104543f646a3" containerID="15c9ed5312385bab468671de140eaa891467369fef5ecfb341ed0f0bbbf52552" exitCode=0 Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.417918 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5d74679-msnb4" event={"ID":"b84f683b-2b54-4f11-a13a-104543f646a3","Type":"ContainerDied","Data":"15c9ed5312385bab468671de140eaa891467369fef5ecfb341ed0f0bbbf52552"} Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.552578 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j"] Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.626626 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.725863 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.896564 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlkh4\" (UniqueName: \"kubernetes.io/projected/b84f683b-2b54-4f11-a13a-104543f646a3-kube-api-access-zlkh4\") pod \"b84f683b-2b54-4f11-a13a-104543f646a3\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.897945 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data-custom\") pod \"b84f683b-2b54-4f11-a13a-104543f646a3\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.898167 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-public-tls-certs\") pod \"b84f683b-2b54-4f11-a13a-104543f646a3\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.898411 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-combined-ca-bundle\") pod \"b84f683b-2b54-4f11-a13a-104543f646a3\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.898556 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data\") pod \"b84f683b-2b54-4f11-a13a-104543f646a3\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.898590 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-internal-tls-certs\") pod \"b84f683b-2b54-4f11-a13a-104543f646a3\" (UID: \"b84f683b-2b54-4f11-a13a-104543f646a3\") " Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.911175 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b84f683b-2b54-4f11-a13a-104543f646a3" (UID: "b84f683b-2b54-4f11-a13a-104543f646a3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.921795 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b84f683b-2b54-4f11-a13a-104543f646a3-kube-api-access-zlkh4" (OuterVolumeSpecName: "kube-api-access-zlkh4") pod "b84f683b-2b54-4f11-a13a-104543f646a3" (UID: "b84f683b-2b54-4f11-a13a-104543f646a3"). InnerVolumeSpecName "kube-api-access-zlkh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:30 crc kubenswrapper[4809]: I0930 00:34:30.950287 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b84f683b-2b54-4f11-a13a-104543f646a3" (UID: "b84f683b-2b54-4f11-a13a-104543f646a3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.001564 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b84f683b-2b54-4f11-a13a-104543f646a3" (UID: "b84f683b-2b54-4f11-a13a-104543f646a3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.001934 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.001970 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlkh4\" (UniqueName: \"kubernetes.io/projected/b84f683b-2b54-4f11-a13a-104543f646a3-kube-api-access-zlkh4\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.001983 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.019937 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b84f683b-2b54-4f11-a13a-104543f646a3" (UID: "b84f683b-2b54-4f11-a13a-104543f646a3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.078560 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data" (OuterVolumeSpecName: "config-data") pod "b84f683b-2b54-4f11-a13a-104543f646a3" (UID: "b84f683b-2b54-4f11-a13a-104543f646a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.106343 4809 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.106372 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.106380 4809 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b84f683b-2b54-4f11-a13a-104543f646a3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.189808 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.309724 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-combined-ca-bundle\") pod \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.309812 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-internal-tls-certs\") pod \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.309941 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data\") pod \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.309966 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-public-tls-certs\") pod \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.310021 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data-custom\") pod \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.310281 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj4px\" (UniqueName: \"kubernetes.io/projected/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-kube-api-access-bj4px\") pod \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\" (UID: \"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42\") " Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.315242 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" (UID: "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.316569 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-kube-api-access-bj4px" (OuterVolumeSpecName: "kube-api-access-bj4px") pod "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" (UID: "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42"). InnerVolumeSpecName "kube-api-access-bj4px". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.348282 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" (UID: "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.379269 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data" (OuterVolumeSpecName: "config-data") pod "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" (UID: "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.391229 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" (UID: "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.392792 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" (UID: "cf6b2c7c-2b98-4fad-925a-86f18d1e7e42"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.413164 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj4px\" (UniqueName: \"kubernetes.io/projected/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-kube-api-access-bj4px\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.413205 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.413220 4809 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.413232 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.413244 4809 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.413258 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.430073 4809 generic.go:334] "Generic (PLEG): container finished" podID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" containerID="28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722" exitCode=0 Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.431524 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" 
event={"ID":"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42","Type":"ContainerDied","Data":"28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722"} Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.431633 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" event={"ID":"cf6b2c7c-2b98-4fad-925a-86f18d1e7e42","Type":"ContainerDied","Data":"93e119bb006e2ea4d012fcda60b2fa58a452fdd94089b43138075f5cd46ab7a6"} Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.431766 4809 scope.go:117] "RemoveContainer" containerID="28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.431999 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.456757 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c5d74679-msnb4" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.458884 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c5d74679-msnb4" event={"ID":"b84f683b-2b54-4f11-a13a-104543f646a3","Type":"ContainerDied","Data":"03d0600b1e39121f0ebdd8b8e8deca8e635583d2c58e6b145b30515b67ac1b8f"} Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.460448 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" event={"ID":"a89a4bf7-7d14-4374-9de0-adf3c01c20e4","Type":"ContainerStarted","Data":"55d7c95fb2201f9fdb132d6bbf0b558ab72ebe623465dcc96b31ef306bfa253f"} Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.497419 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-75b57bc64b-sfpzl"] Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.499252 4809 scope.go:117] "RemoveContainer" containerID="28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722" Sep 30 00:34:31 crc kubenswrapper[4809]: E0930 00:34:31.502820 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722\": container with ID starting with 28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722 not found: ID does not exist" containerID="28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.502867 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722"} err="failed to get container status \"28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722\": rpc error: code = NotFound desc = could not find container \"28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722\": container with ID starting with 28d28088a4a4dc0b27931fbd9ce3bde6090ed7cd1da8520133241733347a6722 not found: ID does not exist" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.502903 4809 scope.go:117] "RemoveContainer" containerID="15c9ed5312385bab468671de140eaa891467369fef5ecfb341ed0f0bbbf52552" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.519148 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-75b57bc64b-sfpzl"] Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.535237 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/heat-api-c5d74679-msnb4"] Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.555571 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-c5d74679-msnb4"] Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.710508 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b84f683b-2b54-4f11-a13a-104543f646a3" path="/var/lib/kubelet/pods/b84f683b-2b54-4f11-a13a-104543f646a3/volumes" Sep 30 00:34:31 crc kubenswrapper[4809]: I0930 00:34:31.711546 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" path="/var/lib/kubelet/pods/cf6b2c7c-2b98-4fad-925a-86f18d1e7e42/volumes" Sep 30 00:34:35 crc kubenswrapper[4809]: I0930 00:34:35.468787 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6b6bcf86f-4vgsr" Sep 30 00:34:35 crc kubenswrapper[4809]: I0930 00:34:35.532831 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-7c4c5654b8-svs85"] Sep 30 00:34:35 crc kubenswrapper[4809]: I0930 00:34:35.533302 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-7c4c5654b8-svs85" podUID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" containerName="heat-engine" containerID="cri-o://0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1" gracePeriod=60 Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.563229 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-2bnnc"] Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.575548 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-2bnnc"] Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.693747 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-5gd22"] Sep 30 00:34:38 crc kubenswrapper[4809]: E0930 00:34:38.694176 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" containerName="heat-cfnapi" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.694195 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" containerName="heat-cfnapi" Sep 30 00:34:38 crc kubenswrapper[4809]: E0930 00:34:38.694231 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b84f683b-2b54-4f11-a13a-104543f646a3" containerName="heat-api" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.694238 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b84f683b-2b54-4f11-a13a-104543f646a3" containerName="heat-api" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.694441 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="b84f683b-2b54-4f11-a13a-104543f646a3" containerName="heat-api" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.694464 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" containerName="heat-cfnapi" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.696140 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.712251 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-5gd22"] Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.785966 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzf6w\" (UniqueName: \"kubernetes.io/projected/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-kube-api-access-qzf6w\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.786015 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-scripts\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.786040 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-combined-ca-bundle\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.786498 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-config-data\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.889114 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-scripts\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.889164 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-combined-ca-bundle\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.889293 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-config-data\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.889415 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzf6w\" (UniqueName: \"kubernetes.io/projected/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-kube-api-access-qzf6w\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.893962 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-scripts\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " 
pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.896538 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-combined-ca-bundle\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.902866 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-config-data\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:38 crc kubenswrapper[4809]: I0930 00:34:38.910610 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzf6w\" (UniqueName: \"kubernetes.io/projected/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-kube-api-access-qzf6w\") pod \"aodh-db-sync-5gd22\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:39 crc kubenswrapper[4809]: I0930 00:34:39.020999 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:39 crc kubenswrapper[4809]: E0930 00:34:39.062785 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:34:39 crc kubenswrapper[4809]: E0930 00:34:39.065267 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:34:39 crc kubenswrapper[4809]: E0930 00:34:39.066909 4809 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Sep 30 00:34:39 crc kubenswrapper[4809]: E0930 00:34:39.066978 4809 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-7c4c5654b8-svs85" podUID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" containerName="heat-engine" Sep 30 00:34:39 crc kubenswrapper[4809]: I0930 00:34:39.757022 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1120e47a-9490-497a-a79b-aa2941e070f7" path="/var/lib/kubelet/pods/1120e47a-9490-497a-a79b-aa2941e070f7/volumes" Sep 30 00:34:40 crc kubenswrapper[4809]: I0930 00:34:40.828709 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-cfnapi-75b57bc64b-sfpzl" podUID="cf6b2c7c-2b98-4fad-925a-86f18d1e7e42" containerName="heat-cfnapi" probeResult="failure" output="Get \"https://10.217.0.201:8000/healthcheck\": dial tcp 10.217.0.201:8000: i/o timeout" Sep 30 00:34:41 crc kubenswrapper[4809]: I0930 00:34:41.495625 4809 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/aodh-db-sync-5gd22"] Sep 30 00:34:41 crc kubenswrapper[4809]: I0930 00:34:41.607664 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" event={"ID":"a89a4bf7-7d14-4374-9de0-adf3c01c20e4","Type":"ContainerStarted","Data":"18d3481267e8df2044d4d52ec730f4f1a9c45f2daa8162f645dcfc1857bfb831"} Sep 30 00:34:41 crc kubenswrapper[4809]: I0930 00:34:41.610341 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-5gd22" event={"ID":"e9759b39-ef34-4f3a-bba2-bd1421fee4c6","Type":"ContainerStarted","Data":"4562e94028060ba4fc104a39840da7714cf53fcc0b7f05423222130094f95855"} Sep 30 00:34:41 crc kubenswrapper[4809]: I0930 00:34:41.627409 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" podStartSLOduration=2.215222056 podStartE2EDuration="12.627394301s" podCreationTimestamp="2025-09-30 00:34:29 +0000 UTC" firstStartedPulling="2025-09-30 00:34:30.626422436 +0000 UTC m=+1521.662671844" lastFinishedPulling="2025-09-30 00:34:41.038594681 +0000 UTC m=+1532.074844089" observedRunningTime="2025-09-30 00:34:41.626339352 +0000 UTC m=+1532.662588770" watchObservedRunningTime="2025-09-30 00:34:41.627394301 +0000 UTC m=+1532.663643709" Sep 30 00:34:46 crc kubenswrapper[4809]: I0930 00:34:46.351942 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 00:34:46 crc kubenswrapper[4809]: I0930 00:34:46.643805 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 00:34:46 crc kubenswrapper[4809]: I0930 00:34:46.661019 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-5gd22" event={"ID":"e9759b39-ef34-4f3a-bba2-bd1421fee4c6","Type":"ContainerStarted","Data":"f054046a6250eb8ce1aa2af86898bf23a93b8382e08eeb551bc027c2d9b5c345"} Sep 30 00:34:46 crc kubenswrapper[4809]: I0930 00:34:46.693361 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-5gd22" podStartSLOduration=4.210447413 podStartE2EDuration="8.693342667s" podCreationTimestamp="2025-09-30 00:34:38 +0000 UTC" firstStartedPulling="2025-09-30 00:34:41.498407291 +0000 UTC m=+1532.534656699" lastFinishedPulling="2025-09-30 00:34:45.981302535 +0000 UTC m=+1537.017551953" observedRunningTime="2025-09-30 00:34:46.685595548 +0000 UTC m=+1537.721844976" watchObservedRunningTime="2025-09-30 00:34:46.693342667 +0000 UTC m=+1537.729592075" Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.686475 4809 generic.go:334] "Generic (PLEG): container finished" podID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" containerID="0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1" exitCode=0 Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.687469 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7c4c5654b8-svs85" event={"ID":"e8a858b6-8b7c-4a4d-805f-d129492e0f57","Type":"ContainerDied","Data":"0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1"} Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.814261 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.937879 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data-custom\") pod \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.937996 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-combined-ca-bundle\") pod \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.938119 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data\") pod \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.938210 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrzr8\" (UniqueName: \"kubernetes.io/projected/e8a858b6-8b7c-4a4d-805f-d129492e0f57-kube-api-access-lrzr8\") pod \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\" (UID: \"e8a858b6-8b7c-4a4d-805f-d129492e0f57\") " Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.949986 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e8a858b6-8b7c-4a4d-805f-d129492e0f57" (UID: "e8a858b6-8b7c-4a4d-805f-d129492e0f57"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.950009 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8a858b6-8b7c-4a4d-805f-d129492e0f57-kube-api-access-lrzr8" (OuterVolumeSpecName: "kube-api-access-lrzr8") pod "e8a858b6-8b7c-4a4d-805f-d129492e0f57" (UID: "e8a858b6-8b7c-4a4d-805f-d129492e0f57"). InnerVolumeSpecName "kube-api-access-lrzr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:48 crc kubenswrapper[4809]: I0930 00:34:48.984579 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8a858b6-8b7c-4a4d-805f-d129492e0f57" (UID: "e8a858b6-8b7c-4a4d-805f-d129492e0f57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.013963 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data" (OuterVolumeSpecName: "config-data") pod "e8a858b6-8b7c-4a4d-805f-d129492e0f57" (UID: "e8a858b6-8b7c-4a4d-805f-d129492e0f57"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.040930 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.040968 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.040982 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8a858b6-8b7c-4a4d-805f-d129492e0f57-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.040996 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrzr8\" (UniqueName: \"kubernetes.io/projected/e8a858b6-8b7c-4a4d-805f-d129492e0f57-kube-api-access-lrzr8\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.701605 4809 generic.go:334] "Generic (PLEG): container finished" podID="e9759b39-ef34-4f3a-bba2-bd1421fee4c6" containerID="f054046a6250eb8ce1aa2af86898bf23a93b8382e08eeb551bc027c2d9b5c345" exitCode=0 Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.704983 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7c4c5654b8-svs85" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.711193 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-5gd22" event={"ID":"e9759b39-ef34-4f3a-bba2-bd1421fee4c6","Type":"ContainerDied","Data":"f054046a6250eb8ce1aa2af86898bf23a93b8382e08eeb551bc027c2d9b5c345"} Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.711275 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7c4c5654b8-svs85" event={"ID":"e8a858b6-8b7c-4a4d-805f-d129492e0f57","Type":"ContainerDied","Data":"694d2454da6af0628f339c103a664053b9eda9edb903e28f81fb3dc8ba64c001"} Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.711307 4809 scope.go:117] "RemoveContainer" containerID="0096310072aa35a782406e714dbb8512f9ff7d94f7fe4733e322dd3a68ce3bf1" Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.754147 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-7c4c5654b8-svs85"] Sep 30 00:34:49 crc kubenswrapper[4809]: I0930 00:34:49.762971 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-7c4c5654b8-svs85"] Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.222961 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.289486 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzf6w\" (UniqueName: \"kubernetes.io/projected/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-kube-api-access-qzf6w\") pod \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.289539 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-config-data\") pod \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.289671 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-combined-ca-bundle\") pod \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.289886 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-scripts\") pod \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\" (UID: \"e9759b39-ef34-4f3a-bba2-bd1421fee4c6\") " Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.303065 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-kube-api-access-qzf6w" (OuterVolumeSpecName: "kube-api-access-qzf6w") pod "e9759b39-ef34-4f3a-bba2-bd1421fee4c6" (UID: "e9759b39-ef34-4f3a-bba2-bd1421fee4c6"). InnerVolumeSpecName "kube-api-access-qzf6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.303550 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-scripts" (OuterVolumeSpecName: "scripts") pod "e9759b39-ef34-4f3a-bba2-bd1421fee4c6" (UID: "e9759b39-ef34-4f3a-bba2-bd1421fee4c6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.336079 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-config-data" (OuterVolumeSpecName: "config-data") pod "e9759b39-ef34-4f3a-bba2-bd1421fee4c6" (UID: "e9759b39-ef34-4f3a-bba2-bd1421fee4c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.338702 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9759b39-ef34-4f3a-bba2-bd1421fee4c6" (UID: "e9759b39-ef34-4f3a-bba2-bd1421fee4c6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.392514 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzf6w\" (UniqueName: \"kubernetes.io/projected/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-kube-api-access-qzf6w\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.392576 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.392587 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.392595 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9759b39-ef34-4f3a-bba2-bd1421fee4c6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.718218 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" path="/var/lib/kubelet/pods/e8a858b6-8b7c-4a4d-805f-d129492e0f57/volumes" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.778171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-5gd22" event={"ID":"e9759b39-ef34-4f3a-bba2-bd1421fee4c6","Type":"ContainerDied","Data":"4562e94028060ba4fc104a39840da7714cf53fcc0b7f05423222130094f95855"} Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.778227 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4562e94028060ba4fc104a39840da7714cf53fcc0b7f05423222130094f95855" Sep 30 00:34:51 crc kubenswrapper[4809]: I0930 00:34:51.778231 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-5gd22" Sep 30 00:34:53 crc kubenswrapper[4809]: I0930 00:34:53.653137 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Sep 30 00:34:53 crc kubenswrapper[4809]: I0930 00:34:53.655228 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-api" containerID="cri-o://d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9" gracePeriod=30 Sep 30 00:34:53 crc kubenswrapper[4809]: I0930 00:34:53.655662 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-notifier" containerID="cri-o://ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73" gracePeriod=30 Sep 30 00:34:53 crc kubenswrapper[4809]: I0930 00:34:53.655756 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-listener" containerID="cri-o://40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c" gracePeriod=30 Sep 30 00:34:53 crc kubenswrapper[4809]: I0930 00:34:53.655676 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-evaluator" containerID="cri-o://a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586" gracePeriod=30 Sep 30 00:34:54 crc kubenswrapper[4809]: I0930 00:34:54.819034 4809 generic.go:334] "Generic (PLEG): container finished" podID="369bb779-4353-4a71-b237-da228464a8ac" containerID="a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586" exitCode=0 Sep 30 00:34:54 crc kubenswrapper[4809]: I0930 00:34:54.819284 4809 generic.go:334] "Generic (PLEG): container finished" podID="369bb779-4353-4a71-b237-da228464a8ac" containerID="d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9" exitCode=0 Sep 30 00:34:54 crc kubenswrapper[4809]: I0930 00:34:54.819119 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerDied","Data":"a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586"} Sep 30 00:34:54 crc kubenswrapper[4809]: I0930 00:34:54.819347 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerDied","Data":"d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9"} Sep 30 00:34:54 crc kubenswrapper[4809]: I0930 00:34:54.822387 4809 generic.go:334] "Generic (PLEG): container finished" podID="a89a4bf7-7d14-4374-9de0-adf3c01c20e4" containerID="18d3481267e8df2044d4d52ec730f4f1a9c45f2daa8162f645dcfc1857bfb831" exitCode=0 Sep 30 00:34:54 crc kubenswrapper[4809]: I0930 00:34:54.822444 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" event={"ID":"a89a4bf7-7d14-4374-9de0-adf3c01c20e4","Type":"ContainerDied","Data":"18d3481267e8df2044d4d52ec730f4f1a9c45f2daa8162f645dcfc1857bfb831"} Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.325085 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.325174 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.325220 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.326279 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.326375 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" gracePeriod=600 Sep 30 00:34:55 crc kubenswrapper[4809]: E0930 00:34:55.453247 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.835372 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" exitCode=0 Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.835446 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac"} Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.835518 4809 scope.go:117] "RemoveContainer" containerID="ea47a4425c6b80c1d2b34f75ad9b62ec24ab7c72e09d6e8962f4a70eaa824489" Sep 30 00:34:55 crc kubenswrapper[4809]: I0930 00:34:55.836264 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:34:55 crc kubenswrapper[4809]: E0930 00:34:55.836558 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.342066 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.406028 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l22ph\" (UniqueName: \"kubernetes.io/projected/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-kube-api-access-l22ph\") pod \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.406160 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-inventory\") pod \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.406196 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-ssh-key\") pod \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.406247 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-repo-setup-combined-ca-bundle\") pod \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\" (UID: \"a89a4bf7-7d14-4374-9de0-adf3c01c20e4\") " Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.413178 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-kube-api-access-l22ph" (OuterVolumeSpecName: "kube-api-access-l22ph") pod "a89a4bf7-7d14-4374-9de0-adf3c01c20e4" (UID: "a89a4bf7-7d14-4374-9de0-adf3c01c20e4"). InnerVolumeSpecName "kube-api-access-l22ph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.419155 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a89a4bf7-7d14-4374-9de0-adf3c01c20e4" (UID: "a89a4bf7-7d14-4374-9de0-adf3c01c20e4"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.438262 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a89a4bf7-7d14-4374-9de0-adf3c01c20e4" (UID: "a89a4bf7-7d14-4374-9de0-adf3c01c20e4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.467704 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-inventory" (OuterVolumeSpecName: "inventory") pod "a89a4bf7-7d14-4374-9de0-adf3c01c20e4" (UID: "a89a4bf7-7d14-4374-9de0-adf3c01c20e4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.509028 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.509161 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.509221 4809 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.509278 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l22ph\" (UniqueName: \"kubernetes.io/projected/a89a4bf7-7d14-4374-9de0-adf3c01c20e4-kube-api-access-l22ph\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.882153 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" event={"ID":"a89a4bf7-7d14-4374-9de0-adf3c01c20e4","Type":"ContainerDied","Data":"55d7c95fb2201f9fdb132d6bbf0b558ab72ebe623465dcc96b31ef306bfa253f"} Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.882461 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55d7c95fb2201f9fdb132d6bbf0b558ab72ebe623465dcc96b31ef306bfa253f" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.882214 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.937594 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p"] Sep 30 00:34:56 crc kubenswrapper[4809]: E0930 00:34:56.938040 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89a4bf7-7d14-4374-9de0-adf3c01c20e4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.938061 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89a4bf7-7d14-4374-9de0-adf3c01c20e4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 00:34:56 crc kubenswrapper[4809]: E0930 00:34:56.938081 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9759b39-ef34-4f3a-bba2-bd1421fee4c6" containerName="aodh-db-sync" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.938089 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9759b39-ef34-4f3a-bba2-bd1421fee4c6" containerName="aodh-db-sync" Sep 30 00:34:56 crc kubenswrapper[4809]: E0930 00:34:56.938127 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" containerName="heat-engine" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.938134 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" containerName="heat-engine" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.938330 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8a858b6-8b7c-4a4d-805f-d129492e0f57" containerName="heat-engine" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.938347 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9759b39-ef34-4f3a-bba2-bd1421fee4c6" containerName="aodh-db-sync" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.938369 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a89a4bf7-7d14-4374-9de0-adf3c01c20e4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.939120 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.941783 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.942714 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.943167 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.944845 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:34:56 crc kubenswrapper[4809]: I0930 00:34:56.961989 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p"] Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.019950 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.020065 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.020119 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hvt7\" (UniqueName: \"kubernetes.io/projected/63766b50-2bbd-4532-89a3-83cc3b063a52-kube-api-access-6hvt7\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.020178 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.122602 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.122698 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hvt7\" (UniqueName: \"kubernetes.io/projected/63766b50-2bbd-4532-89a3-83cc3b063a52-kube-api-access-6hvt7\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.122766 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.122807 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.127063 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.127193 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.134126 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.142179 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hvt7\" (UniqueName: \"kubernetes.io/projected/63766b50-2bbd-4532-89a3-83cc3b063a52-kube-api-access-6hvt7\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.258552 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.814932 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p"] Sep 30 00:34:57 crc kubenswrapper[4809]: W0930 00:34:57.816702 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63766b50_2bbd_4532_89a3_83cc3b063a52.slice/crio-10f8ce61afd928235e08c3881bbdd3274323ca1ac72ec93d0360a57d229b56bb WatchSource:0}: Error finding container 10f8ce61afd928235e08c3881bbdd3274323ca1ac72ec93d0360a57d229b56bb: Status 404 returned error can't find the container with id 10f8ce61afd928235e08c3881bbdd3274323ca1ac72ec93d0360a57d229b56bb Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.895151 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" event={"ID":"63766b50-2bbd-4532-89a3-83cc3b063a52","Type":"ContainerStarted","Data":"10f8ce61afd928235e08c3881bbdd3274323ca1ac72ec93d0360a57d229b56bb"} Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.898715 4809 generic.go:334] "Generic (PLEG): container finished" podID="369bb779-4353-4a71-b237-da228464a8ac" containerID="40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c" exitCode=0 Sep 30 00:34:57 crc kubenswrapper[4809]: I0930 00:34:57.898777 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerDied","Data":"40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c"} Sep 30 00:34:58 crc kubenswrapper[4809]: I0930 00:34:58.910953 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" event={"ID":"63766b50-2bbd-4532-89a3-83cc3b063a52","Type":"ContainerStarted","Data":"0834ffc58f15979ef7c0585aabdb910499ca9f968ee4816334ef374874270d6c"} Sep 30 00:34:58 crc kubenswrapper[4809]: I0930 00:34:58.933058 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" podStartSLOduration=2.33629876 podStartE2EDuration="2.933023452s" podCreationTimestamp="2025-09-30 00:34:56 +0000 UTC" firstStartedPulling="2025-09-30 00:34:57.819700975 +0000 UTC m=+1548.855950393" lastFinishedPulling="2025-09-30 00:34:58.416425677 +0000 UTC m=+1549.452675085" observedRunningTime="2025-09-30 00:34:58.928609844 +0000 UTC m=+1549.964859292" watchObservedRunningTime="2025-09-30 00:34:58.933023452 +0000 UTC m=+1549.969272900" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.656279 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.767591 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-scripts\") pod \"369bb779-4353-4a71-b237-da228464a8ac\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.767732 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-internal-tls-certs\") pod \"369bb779-4353-4a71-b237-da228464a8ac\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.767795 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-config-data\") pod \"369bb779-4353-4a71-b237-da228464a8ac\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.767932 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-combined-ca-bundle\") pod \"369bb779-4353-4a71-b237-da228464a8ac\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.768018 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-public-tls-certs\") pod \"369bb779-4353-4a71-b237-da228464a8ac\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.768069 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8dl9\" (UniqueName: \"kubernetes.io/projected/369bb779-4353-4a71-b237-da228464a8ac-kube-api-access-c8dl9\") pod \"369bb779-4353-4a71-b237-da228464a8ac\" (UID: \"369bb779-4353-4a71-b237-da228464a8ac\") " Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.778320 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/369bb779-4353-4a71-b237-da228464a8ac-kube-api-access-c8dl9" (OuterVolumeSpecName: "kube-api-access-c8dl9") pod "369bb779-4353-4a71-b237-da228464a8ac" (UID: "369bb779-4353-4a71-b237-da228464a8ac"). InnerVolumeSpecName "kube-api-access-c8dl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.780265 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-scripts" (OuterVolumeSpecName: "scripts") pod "369bb779-4353-4a71-b237-da228464a8ac" (UID: "369bb779-4353-4a71-b237-da228464a8ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.850197 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "369bb779-4353-4a71-b237-da228464a8ac" (UID: "369bb779-4353-4a71-b237-da228464a8ac"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.870839 4809 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.871149 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8dl9\" (UniqueName: \"kubernetes.io/projected/369bb779-4353-4a71-b237-da228464a8ac-kube-api-access-c8dl9\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.871165 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.873167 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "369bb779-4353-4a71-b237-da228464a8ac" (UID: "369bb779-4353-4a71-b237-da228464a8ac"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.901488 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "369bb779-4353-4a71-b237-da228464a8ac" (UID: "369bb779-4353-4a71-b237-da228464a8ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.902975 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-config-data" (OuterVolumeSpecName: "config-data") pod "369bb779-4353-4a71-b237-da228464a8ac" (UID: "369bb779-4353-4a71-b237-da228464a8ac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.972554 4809 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.972791 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.972878 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369bb779-4353-4a71-b237-da228464a8ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.983298 4809 generic.go:334] "Generic (PLEG): container finished" podID="369bb779-4353-4a71-b237-da228464a8ac" containerID="ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73" exitCode=0 Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.983341 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerDied","Data":"ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73"} Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.983367 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"369bb779-4353-4a71-b237-da228464a8ac","Type":"ContainerDied","Data":"8da5a92395c698976ed4c1e5b8abdec583362e27712d0e0f0727e36f58e50bd4"} Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.983382 4809 scope.go:117] "RemoveContainer" containerID="40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c" Sep 30 00:35:03 crc kubenswrapper[4809]: I0930 00:35:03.983505 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.016603 4809 scope.go:117] "RemoveContainer" containerID="ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.023847 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.035177 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.048068 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.048774 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-api" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.048799 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-api" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.048835 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-evaluator" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.048849 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-evaluator" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.048876 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-notifier" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.048889 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-notifier" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.048915 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-listener" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.048927 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-listener" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.049267 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-notifier" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.049295 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-listener" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.049326 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-api" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.049348 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="369bb779-4353-4a71-b237-da228464a8ac" containerName="aodh-evaluator" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.052301 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.054879 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.055112 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-vzdp5" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.055261 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.055449 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.059764 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.059799 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.062731 4809 scope.go:117] "RemoveContainer" containerID="a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.159312 4809 scope.go:117] "RemoveContainer" containerID="d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.176430 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-public-tls-certs\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.176502 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td88f\" (UniqueName: \"kubernetes.io/projected/4a58ca7e-5f56-4340-85ba-16c0952512ae-kube-api-access-td88f\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.176541 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-scripts\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.176571 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-combined-ca-bundle\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.176737 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-internal-tls-certs\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.177101 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-config-data\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " 
pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.184016 4809 scope.go:117] "RemoveContainer" containerID="40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.184924 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c\": container with ID starting with 40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c not found: ID does not exist" containerID="40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.184958 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c"} err="failed to get container status \"40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c\": rpc error: code = NotFound desc = could not find container \"40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c\": container with ID starting with 40d6724fabd9a6b6d92df7cbcf6829e0303ffaaa564de86aa450e282e36aee5c not found: ID does not exist" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.184982 4809 scope.go:117] "RemoveContainer" containerID="ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.185343 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73\": container with ID starting with ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73 not found: ID does not exist" containerID="ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.185372 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73"} err="failed to get container status \"ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73\": rpc error: code = NotFound desc = could not find container \"ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73\": container with ID starting with ca4c76ef34090d4435bf17161899710f3abde8a9a8bb9ac0d8653218d64e8c73 not found: ID does not exist" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.185388 4809 scope.go:117] "RemoveContainer" containerID="a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.185630 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586\": container with ID starting with a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586 not found: ID does not exist" containerID="a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.185792 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586"} err="failed to get container status \"a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586\": rpc error: code = NotFound desc = could not find container 
\"a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586\": container with ID starting with a6c9e186a65ee36f2620f2f0da8c22851cd07774de6b77a42a1a593316fce586 not found: ID does not exist" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.185813 4809 scope.go:117] "RemoveContainer" containerID="d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9" Sep 30 00:35:04 crc kubenswrapper[4809]: E0930 00:35:04.186226 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9\": container with ID starting with d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9 not found: ID does not exist" containerID="d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.186267 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9"} err="failed to get container status \"d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9\": rpc error: code = NotFound desc = could not find container \"d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9\": container with ID starting with d56920d393845d389638c59699fb8c6445a5f8bcc693fa087036cbb48e29eda9 not found: ID does not exist" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.278965 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-config-data\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.279050 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-public-tls-certs\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.279108 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td88f\" (UniqueName: \"kubernetes.io/projected/4a58ca7e-5f56-4340-85ba-16c0952512ae-kube-api-access-td88f\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.279137 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-scripts\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.279173 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-combined-ca-bundle\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.279207 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-internal-tls-certs\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 
00:35:04.283106 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-config-data\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.283272 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-scripts\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.283298 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-public-tls-certs\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.283942 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-combined-ca-bundle\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.289438 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a58ca7e-5f56-4340-85ba-16c0952512ae-internal-tls-certs\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.296120 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td88f\" (UniqueName: \"kubernetes.io/projected/4a58ca7e-5f56-4340-85ba-16c0952512ae-kube-api-access-td88f\") pod \"aodh-0\" (UID: \"4a58ca7e-5f56-4340-85ba-16c0952512ae\") " pod="openstack/aodh-0" Sep 30 00:35:04 crc kubenswrapper[4809]: I0930 00:35:04.389979 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 00:35:05 crc kubenswrapper[4809]: I0930 00:35:05.035162 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:35:05 crc kubenswrapper[4809]: I0930 00:35:05.715439 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="369bb779-4353-4a71-b237-da228464a8ac" path="/var/lib/kubelet/pods/369bb779-4353-4a71-b237-da228464a8ac/volumes" Sep 30 00:35:06 crc kubenswrapper[4809]: I0930 00:35:06.040292 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"4a58ca7e-5f56-4340-85ba-16c0952512ae","Type":"ContainerStarted","Data":"377570c9425e0e14dae66b0814ec6c3b9cabed2bf6ea01ace26ba2ca1d2c669d"} Sep 30 00:35:06 crc kubenswrapper[4809]: I0930 00:35:06.040339 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"4a58ca7e-5f56-4340-85ba-16c0952512ae","Type":"ContainerStarted","Data":"ad3154afff8afdf2f436cd92fbda5b4578ac7467f85f162f1ff5746d24a8007e"} Sep 30 00:35:07 crc kubenswrapper[4809]: I0930 00:35:07.080850 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"4a58ca7e-5f56-4340-85ba-16c0952512ae","Type":"ContainerStarted","Data":"9a88a5af4cefbdddef41581f511112bddc1da4c1e23c1de3b00f9921ddf01af2"} Sep 30 00:35:08 crc kubenswrapper[4809]: I0930 00:35:08.097057 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"4a58ca7e-5f56-4340-85ba-16c0952512ae","Type":"ContainerStarted","Data":"036ae0289eb80598d4915a4ad594bf9cf5c232851a8fdb0c277fe6f65a0e66a9"} Sep 30 00:35:09 crc kubenswrapper[4809]: I0930 00:35:09.109771 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"4a58ca7e-5f56-4340-85ba-16c0952512ae","Type":"ContainerStarted","Data":"ecf6c7b00d8df7777e207fe0a2afa0c73021960deb50a41df1aa23557452166f"} Sep 30 00:35:09 crc kubenswrapper[4809]: I0930 00:35:09.152002 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.620590937 podStartE2EDuration="5.151978991s" podCreationTimestamp="2025-09-30 00:35:04 +0000 UTC" firstStartedPulling="2025-09-30 00:35:05.040602637 +0000 UTC m=+1556.076852055" lastFinishedPulling="2025-09-30 00:35:08.571990701 +0000 UTC m=+1559.608240109" observedRunningTime="2025-09-30 00:35:09.129255557 +0000 UTC m=+1560.165504985" watchObservedRunningTime="2025-09-30 00:35:09.151978991 +0000 UTC m=+1560.188228389" Sep 30 00:35:10 crc kubenswrapper[4809]: I0930 00:35:10.691449 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:35:10 crc kubenswrapper[4809]: E0930 00:35:10.692995 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:35:11 crc kubenswrapper[4809]: I0930 00:35:11.930884 4809 scope.go:117] "RemoveContainer" containerID="d468d242c880b6878e8f06191fbe849fca2c5010b6e3feb8f14bfee7c5b70917" Sep 30 00:35:24 crc kubenswrapper[4809]: I0930 00:35:24.690457 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:35:24 
crc kubenswrapper[4809]: E0930 00:35:24.691104 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:35:35 crc kubenswrapper[4809]: I0930 00:35:35.691724 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:35:35 crc kubenswrapper[4809]: E0930 00:35:35.692455 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:35:48 crc kubenswrapper[4809]: I0930 00:35:48.691519 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:35:48 crc kubenswrapper[4809]: E0930 00:35:48.692187 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:36:01 crc kubenswrapper[4809]: I0930 00:36:01.690563 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:36:01 crc kubenswrapper[4809]: E0930 00:36:01.692368 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:36:12 crc kubenswrapper[4809]: I0930 00:36:12.080462 4809 scope.go:117] "RemoveContainer" containerID="8b626b4d4e767700ac33232df90ec4d60f87c18ee0c45a30f5c6c8ddb44d31a0" Sep 30 00:36:12 crc kubenswrapper[4809]: I0930 00:36:12.126841 4809 scope.go:117] "RemoveContainer" containerID="db7fc80ecb951daceaff02d4e2289f2c318240092d3e9daa10d8fabdab4c79f2" Sep 30 00:36:12 crc kubenswrapper[4809]: I0930 00:36:12.161512 4809 scope.go:117] "RemoveContainer" containerID="c29b52b7bab2bd667816b074738a33d535361b5debe384c0b591cd5211cd0deb" Sep 30 00:36:12 crc kubenswrapper[4809]: I0930 00:36:12.185909 4809 scope.go:117] "RemoveContainer" containerID="bb10db75c5472efda1299a017e52c1ba85a1abafb9f99878972a3dfe411a0f8d" Sep 30 00:36:15 crc kubenswrapper[4809]: I0930 00:36:15.691800 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:36:15 crc kubenswrapper[4809]: E0930 00:36:15.692533 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:36:24 crc kubenswrapper[4809]: I0930 00:36:24.830130 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rfldr"] Sep 30 00:36:24 crc kubenswrapper[4809]: I0930 00:36:24.832995 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:24 crc kubenswrapper[4809]: I0930 00:36:24.842485 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rfldr"] Sep 30 00:36:24 crc kubenswrapper[4809]: I0930 00:36:24.966065 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-catalog-content\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:24 crc kubenswrapper[4809]: I0930 00:36:24.966171 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfcnb\" (UniqueName: \"kubernetes.io/projected/ca19c567-76df-4a04-b21f-79ae97f8fa4e-kube-api-access-pfcnb\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:24 crc kubenswrapper[4809]: I0930 00:36:24.966238 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-utilities\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.068248 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-utilities\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.068403 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-catalog-content\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.068487 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfcnb\" (UniqueName: \"kubernetes.io/projected/ca19c567-76df-4a04-b21f-79ae97f8fa4e-kube-api-access-pfcnb\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.069271 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-catalog-content\") pod \"redhat-operators-rfldr\" (UID: 
\"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.069352 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-utilities\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.091333 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfcnb\" (UniqueName: \"kubernetes.io/projected/ca19c567-76df-4a04-b21f-79ae97f8fa4e-kube-api-access-pfcnb\") pod \"redhat-operators-rfldr\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.154032 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.669766 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rfldr"] Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.968071 4809 generic.go:334] "Generic (PLEG): container finished" podID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerID="cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a" exitCode=0 Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.968118 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerDied","Data":"cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a"} Sep 30 00:36:25 crc kubenswrapper[4809]: I0930 00:36:25.968148 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerStarted","Data":"d1ac62dc7d8ae6b3ed1fa0d63d1e7a42f44e3146cf431fc6fced95765e09abb3"} Sep 30 00:36:26 crc kubenswrapper[4809]: I0930 00:36:26.691180 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:36:26 crc kubenswrapper[4809]: E0930 00:36:26.691715 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:36:27 crc kubenswrapper[4809]: I0930 00:36:27.011344 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerStarted","Data":"b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615"} Sep 30 00:36:30 crc kubenswrapper[4809]: I0930 00:36:30.044270 4809 generic.go:334] "Generic (PLEG): container finished" podID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerID="b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615" exitCode=0 Sep 30 00:36:30 crc kubenswrapper[4809]: I0930 00:36:30.044337 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" 
event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerDied","Data":"b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615"} Sep 30 00:36:31 crc kubenswrapper[4809]: I0930 00:36:31.058916 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerStarted","Data":"18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb"} Sep 30 00:36:31 crc kubenswrapper[4809]: I0930 00:36:31.093622 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rfldr" podStartSLOduration=2.534116203 podStartE2EDuration="7.093605542s" podCreationTimestamp="2025-09-30 00:36:24 +0000 UTC" firstStartedPulling="2025-09-30 00:36:25.970763353 +0000 UTC m=+1637.007012761" lastFinishedPulling="2025-09-30 00:36:30.530252692 +0000 UTC m=+1641.566502100" observedRunningTime="2025-09-30 00:36:31.079487105 +0000 UTC m=+1642.115736513" watchObservedRunningTime="2025-09-30 00:36:31.093605542 +0000 UTC m=+1642.129854950" Sep 30 00:36:35 crc kubenswrapper[4809]: I0930 00:36:35.154772 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:35 crc kubenswrapper[4809]: I0930 00:36:35.156684 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:36 crc kubenswrapper[4809]: I0930 00:36:36.219499 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rfldr" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="registry-server" probeResult="failure" output=< Sep 30 00:36:36 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 00:36:36 crc kubenswrapper[4809]: > Sep 30 00:36:41 crc kubenswrapper[4809]: I0930 00:36:41.691529 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:36:41 crc kubenswrapper[4809]: E0930 00:36:41.692305 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:36:45 crc kubenswrapper[4809]: I0930 00:36:45.227124 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:45 crc kubenswrapper[4809]: I0930 00:36:45.277344 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:45 crc kubenswrapper[4809]: I0930 00:36:45.470175 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rfldr"] Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.234448 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rfldr" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="registry-server" containerID="cri-o://18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb" gracePeriod=2 Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.835245 4809 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.945424 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-utilities" (OuterVolumeSpecName: "utilities") pod "ca19c567-76df-4a04-b21f-79ae97f8fa4e" (UID: "ca19c567-76df-4a04-b21f-79ae97f8fa4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.944477 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-utilities\") pod \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.946692 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-catalog-content\") pod \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.946837 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfcnb\" (UniqueName: \"kubernetes.io/projected/ca19c567-76df-4a04-b21f-79ae97f8fa4e-kube-api-access-pfcnb\") pod \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\" (UID: \"ca19c567-76df-4a04-b21f-79ae97f8fa4e\") " Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.947832 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:47 crc kubenswrapper[4809]: I0930 00:36:47.954993 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca19c567-76df-4a04-b21f-79ae97f8fa4e-kube-api-access-pfcnb" (OuterVolumeSpecName: "kube-api-access-pfcnb") pod "ca19c567-76df-4a04-b21f-79ae97f8fa4e" (UID: "ca19c567-76df-4a04-b21f-79ae97f8fa4e"). InnerVolumeSpecName "kube-api-access-pfcnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.045230 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca19c567-76df-4a04-b21f-79ae97f8fa4e" (UID: "ca19c567-76df-4a04-b21f-79ae97f8fa4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.049862 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca19c567-76df-4a04-b21f-79ae97f8fa4e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.049893 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfcnb\" (UniqueName: \"kubernetes.io/projected/ca19c567-76df-4a04-b21f-79ae97f8fa4e-kube-api-access-pfcnb\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.248001 4809 generic.go:334] "Generic (PLEG): container finished" podID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerID="18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb" exitCode=0 Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.248055 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rfldr" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.248069 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerDied","Data":"18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb"} Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.248460 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfldr" event={"ID":"ca19c567-76df-4a04-b21f-79ae97f8fa4e","Type":"ContainerDied","Data":"d1ac62dc7d8ae6b3ed1fa0d63d1e7a42f44e3146cf431fc6fced95765e09abb3"} Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.248492 4809 scope.go:117] "RemoveContainer" containerID="18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.280712 4809 scope.go:117] "RemoveContainer" containerID="b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.291828 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rfldr"] Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.301378 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rfldr"] Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.315085 4809 scope.go:117] "RemoveContainer" containerID="cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.347839 4809 scope.go:117] "RemoveContainer" containerID="18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb" Sep 30 00:36:48 crc kubenswrapper[4809]: E0930 00:36:48.349085 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb\": container with ID starting with 18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb not found: ID does not exist" containerID="18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.349122 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb"} err="failed to get container status \"18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb\": 
rpc error: code = NotFound desc = could not find container \"18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb\": container with ID starting with 18c1da6eae35bc17b6ad05c349e2c90d8fadd02dc0329bef18582ea44eb2b0eb not found: ID does not exist" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.349151 4809 scope.go:117] "RemoveContainer" containerID="b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615" Sep 30 00:36:48 crc kubenswrapper[4809]: E0930 00:36:48.349489 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615\": container with ID starting with b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615 not found: ID does not exist" containerID="b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.349549 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615"} err="failed to get container status \"b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615\": rpc error: code = NotFound desc = could not find container \"b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615\": container with ID starting with b4967ede97f1fbc44ac16dbd9a2def39872f875d59ba2dc9d703122d3cb15615 not found: ID does not exist" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.349592 4809 scope.go:117] "RemoveContainer" containerID="cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a" Sep 30 00:36:48 crc kubenswrapper[4809]: E0930 00:36:48.349965 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a\": container with ID starting with cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a not found: ID does not exist" containerID="cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a" Sep 30 00:36:48 crc kubenswrapper[4809]: I0930 00:36:48.349997 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a"} err="failed to get container status \"cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a\": rpc error: code = NotFound desc = could not find container \"cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a\": container with ID starting with cce76b81465bfc55d06be8275cb5f7cfe05ab7ffeebd462fe4347302550ab13a not found: ID does not exist" Sep 30 00:36:49 crc kubenswrapper[4809]: I0930 00:36:49.713895 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" path="/var/lib/kubelet/pods/ca19c567-76df-4a04-b21f-79ae97f8fa4e/volumes" Sep 30 00:36:52 crc kubenswrapper[4809]: I0930 00:36:52.692087 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:36:52 crc kubenswrapper[4809]: E0930 00:36:52.693061 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:37:06 crc kubenswrapper[4809]: I0930 00:37:06.691566 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:37:06 crc kubenswrapper[4809]: E0930 00:37:06.692863 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:37:12 crc kubenswrapper[4809]: I0930 00:37:12.310162 4809 scope.go:117] "RemoveContainer" containerID="1beb5e23176fd0bf97f216ee81ebd3a771c6ab6d654f17030bbea4ced6dd2d55" Sep 30 00:37:12 crc kubenswrapper[4809]: I0930 00:37:12.350045 4809 scope.go:117] "RemoveContainer" containerID="8b464112038f1ab3f1ccd2a4c4267fa1c4ee86a4354c7022b82deb88788ac059" Sep 30 00:37:12 crc kubenswrapper[4809]: I0930 00:37:12.408027 4809 scope.go:117] "RemoveContainer" containerID="2c1eb314454e5cd9cbd284b2b4e8aa68056a74a16282adba6916ca0b91942d34" Sep 30 00:37:12 crc kubenswrapper[4809]: I0930 00:37:12.465553 4809 scope.go:117] "RemoveContainer" containerID="e65fb4d92202a9bb5d1d214b51244bea6063640ab61317de7b8a72e3db21be73" Sep 30 00:37:12 crc kubenswrapper[4809]: I0930 00:37:12.489210 4809 scope.go:117] "RemoveContainer" containerID="31b0820affe756fc69d15e562cf75e64a4573de59db2f9de9e4bd2c105e05e90" Sep 30 00:37:19 crc kubenswrapper[4809]: I0930 00:37:19.701550 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:37:19 crc kubenswrapper[4809]: E0930 00:37:19.702349 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:37:32 crc kubenswrapper[4809]: I0930 00:37:32.693062 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:37:32 crc kubenswrapper[4809]: E0930 00:37:32.694141 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:37:46 crc kubenswrapper[4809]: I0930 00:37:46.691556 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:37:46 crc kubenswrapper[4809]: E0930 00:37:46.692848 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:37:59 crc kubenswrapper[4809]: I0930 00:37:59.701843 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:37:59 crc kubenswrapper[4809]: E0930 00:37:59.702967 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:38:09 crc kubenswrapper[4809]: I0930 00:38:09.163225 4809 generic.go:334] "Generic (PLEG): container finished" podID="63766b50-2bbd-4532-89a3-83cc3b063a52" containerID="0834ffc58f15979ef7c0585aabdb910499ca9f968ee4816334ef374874270d6c" exitCode=0 Sep 30 00:38:09 crc kubenswrapper[4809]: I0930 00:38:09.163279 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" event={"ID":"63766b50-2bbd-4532-89a3-83cc3b063a52","Type":"ContainerDied","Data":"0834ffc58f15979ef7c0585aabdb910499ca9f968ee4816334ef374874270d6c"} Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.621312 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.692244 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:38:10 crc kubenswrapper[4809]: E0930 00:38:10.692502 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.721679 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-bootstrap-combined-ca-bundle\") pod \"63766b50-2bbd-4532-89a3-83cc3b063a52\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.721869 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-inventory\") pod \"63766b50-2bbd-4532-89a3-83cc3b063a52\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.722219 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hvt7\" (UniqueName: \"kubernetes.io/projected/63766b50-2bbd-4532-89a3-83cc3b063a52-kube-api-access-6hvt7\") pod \"63766b50-2bbd-4532-89a3-83cc3b063a52\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.722352 4809 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-ssh-key\") pod \"63766b50-2bbd-4532-89a3-83cc3b063a52\" (UID: \"63766b50-2bbd-4532-89a3-83cc3b063a52\") " Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.728178 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "63766b50-2bbd-4532-89a3-83cc3b063a52" (UID: "63766b50-2bbd-4532-89a3-83cc3b063a52"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.728336 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63766b50-2bbd-4532-89a3-83cc3b063a52-kube-api-access-6hvt7" (OuterVolumeSpecName: "kube-api-access-6hvt7") pod "63766b50-2bbd-4532-89a3-83cc3b063a52" (UID: "63766b50-2bbd-4532-89a3-83cc3b063a52"). InnerVolumeSpecName "kube-api-access-6hvt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.769615 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-inventory" (OuterVolumeSpecName: "inventory") pod "63766b50-2bbd-4532-89a3-83cc3b063a52" (UID: "63766b50-2bbd-4532-89a3-83cc3b063a52"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.772614 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "63766b50-2bbd-4532-89a3-83cc3b063a52" (UID: "63766b50-2bbd-4532-89a3-83cc3b063a52"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.828449 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hvt7\" (UniqueName: \"kubernetes.io/projected/63766b50-2bbd-4532-89a3-83cc3b063a52-kube-api-access-6hvt7\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.828488 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.828502 4809 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:10 crc kubenswrapper[4809]: I0930 00:38:10.828515 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63766b50-2bbd-4532-89a3-83cc3b063a52-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.184413 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" event={"ID":"63766b50-2bbd-4532-89a3-83cc3b063a52","Type":"ContainerDied","Data":"10f8ce61afd928235e08c3881bbdd3274323ca1ac72ec93d0360a57d229b56bb"} Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.184743 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10f8ce61afd928235e08c3881bbdd3274323ca1ac72ec93d0360a57d229b56bb" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.184569 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.283751 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg"] Sep 30 00:38:11 crc kubenswrapper[4809]: E0930 00:38:11.284246 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63766b50-2bbd-4532-89a3-83cc3b063a52" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.284265 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="63766b50-2bbd-4532-89a3-83cc3b063a52" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 00:38:11 crc kubenswrapper[4809]: E0930 00:38:11.284286 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="extract-content" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.284292 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="extract-content" Sep 30 00:38:11 crc kubenswrapper[4809]: E0930 00:38:11.284317 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="registry-server" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.284323 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="registry-server" Sep 30 00:38:11 crc kubenswrapper[4809]: E0930 00:38:11.284336 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="extract-utilities" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.284342 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="extract-utilities" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.287590 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="63766b50-2bbd-4532-89a3-83cc3b063a52" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.287705 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca19c567-76df-4a04-b21f-79ae97f8fa4e" containerName="registry-server" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.290417 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.296542 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.296766 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.313817 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.314126 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.317259 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg"] Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.440840 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.441101 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkhrl\" (UniqueName: \"kubernetes.io/projected/9a35eaeb-456c-42d9-bd9a-9eb736876d56-kube-api-access-nkhrl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.441183 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.543813 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkhrl\" (UniqueName: \"kubernetes.io/projected/9a35eaeb-456c-42d9-bd9a-9eb736876d56-kube-api-access-nkhrl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.543938 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.544089 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.548792 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.550107 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.564464 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkhrl\" (UniqueName: \"kubernetes.io/projected/9a35eaeb-456c-42d9-bd9a-9eb736876d56-kube-api-access-nkhrl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-rskbg\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:11 crc kubenswrapper[4809]: I0930 00:38:11.612279 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:38:12 crc kubenswrapper[4809]: I0930 00:38:12.174092 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg"] Sep 30 00:38:12 crc kubenswrapper[4809]: I0930 00:38:12.199677 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" event={"ID":"9a35eaeb-456c-42d9-bd9a-9eb736876d56","Type":"ContainerStarted","Data":"72b9224d0e036ee978af69ab7191867c91c0b86dafc48b9c91b57713c09739c7"} Sep 30 00:38:12 crc kubenswrapper[4809]: I0930 00:38:12.586871 4809 scope.go:117] "RemoveContainer" containerID="38d4a99bab5f95a3b5960d15610683ae25f67ff0350f9ea36cae626455c2a0c9" Sep 30 00:38:13 crc kubenswrapper[4809]: I0930 00:38:13.217376 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" event={"ID":"9a35eaeb-456c-42d9-bd9a-9eb736876d56","Type":"ContainerStarted","Data":"fecb52858e5c57ce2303dab098f2883d803e3006fbc4b6c7e27cefafe6d44169"} Sep 30 00:38:13 crc kubenswrapper[4809]: I0930 00:38:13.246875 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" podStartSLOduration=1.6977741750000002 podStartE2EDuration="2.246850195s" podCreationTimestamp="2025-09-30 00:38:11 +0000 UTC" firstStartedPulling="2025-09-30 00:38:12.17733762 +0000 UTC m=+1743.213587028" lastFinishedPulling="2025-09-30 00:38:12.72641364 +0000 UTC m=+1743.762663048" observedRunningTime="2025-09-30 00:38:13.23757238 +0000 UTC m=+1744.273821798" watchObservedRunningTime="2025-09-30 00:38:13.246850195 +0000 UTC m=+1744.283099603" Sep 30 00:38:16 crc kubenswrapper[4809]: I0930 00:38:16.042476 4809 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/mysqld-exporter-openstack-db-create-w7ngf"] Sep 30 00:38:16 crc kubenswrapper[4809]: I0930 00:38:16.066364 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-w7ngf"] Sep 30 00:38:17 crc kubenswrapper[4809]: I0930 00:38:17.709828 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e0dcad9-a81c-4505-9d6c-63d3524defbd" path="/var/lib/kubelet/pods/0e0dcad9-a81c-4505-9d6c-63d3524defbd/volumes" Sep 30 00:38:24 crc kubenswrapper[4809]: I0930 00:38:24.690886 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:38:24 crc kubenswrapper[4809]: E0930 00:38:24.691689 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:38:30 crc kubenswrapper[4809]: I0930 00:38:30.042515 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tbrzd"] Sep 30 00:38:30 crc kubenswrapper[4809]: I0930 00:38:30.055948 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tbrzd"] Sep 30 00:38:31 crc kubenswrapper[4809]: I0930 00:38:31.040688 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-p59l6"] Sep 30 00:38:31 crc kubenswrapper[4809]: I0930 00:38:31.053690 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-p59l6"] Sep 30 00:38:31 crc kubenswrapper[4809]: I0930 00:38:31.713283 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b39da91-0d25-4093-babd-f7ae25503f07" path="/var/lib/kubelet/pods/5b39da91-0d25-4093-babd-f7ae25503f07/volumes" Sep 30 00:38:31 crc kubenswrapper[4809]: I0930 00:38:31.714001 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66b5e01c-bf65-4303-a4b6-20f2fd145710" path="/var/lib/kubelet/pods/66b5e01c-bf65-4303-a4b6-20f2fd145710/volumes" Sep 30 00:38:32 crc kubenswrapper[4809]: I0930 00:38:32.031043 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-bgw8m"] Sep 30 00:38:32 crc kubenswrapper[4809]: I0930 00:38:32.042485 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-f99b-account-create-j64pv"] Sep 30 00:38:32 crc kubenswrapper[4809]: I0930 00:38:32.054412 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-bgw8m"] Sep 30 00:38:32 crc kubenswrapper[4809]: I0930 00:38:32.063234 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-f99b-account-create-j64pv"] Sep 30 00:38:33 crc kubenswrapper[4809]: I0930 00:38:33.703262 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ce5843-8bf2-4db6-b80e-dc6aade1f47d" path="/var/lib/kubelet/pods/83ce5843-8bf2-4db6-b80e-dc6aade1f47d/volumes" Sep 30 00:38:33 crc kubenswrapper[4809]: I0930 00:38:33.704360 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae496d32-ef13-44cc-81aa-06be39a6cb4b" path="/var/lib/kubelet/pods/ae496d32-ef13-44cc-81aa-06be39a6cb4b/volumes" Sep 30 00:38:37 crc kubenswrapper[4809]: I0930 00:38:37.032377 4809 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-56n4d"] Sep 30 00:38:37 crc kubenswrapper[4809]: I0930 00:38:37.041804 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-56n4d"] Sep 30 00:38:37 crc kubenswrapper[4809]: I0930 00:38:37.705403 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de0d3f45-aea9-4cd7-9876-0de2feee40f8" path="/var/lib/kubelet/pods/de0d3f45-aea9-4cd7-9876-0de2feee40f8/volumes" Sep 30 00:38:38 crc kubenswrapper[4809]: I0930 00:38:38.691115 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:38:38 crc kubenswrapper[4809]: E0930 00:38:38.691778 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:38:42 crc kubenswrapper[4809]: I0930 00:38:42.035980 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-sdjks"] Sep 30 00:38:42 crc kubenswrapper[4809]: I0930 00:38:42.048939 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-cfdjx"] Sep 30 00:38:42 crc kubenswrapper[4809]: I0930 00:38:42.059427 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-shpsr"] Sep 30 00:38:42 crc kubenswrapper[4809]: I0930 00:38:42.071496 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-cfdjx"] Sep 30 00:38:42 crc kubenswrapper[4809]: I0930 00:38:42.082430 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-shpsr"] Sep 30 00:38:42 crc kubenswrapper[4809]: I0930 00:38:42.094053 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-sdjks"] Sep 30 00:38:43 crc kubenswrapper[4809]: I0930 00:38:43.028216 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-hcb4b"] Sep 30 00:38:43 crc kubenswrapper[4809]: I0930 00:38:43.039208 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-hcb4b"] Sep 30 00:38:43 crc kubenswrapper[4809]: I0930 00:38:43.703149 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60d6da9e-f759-4557-bc7a-9e73d53728a2" path="/var/lib/kubelet/pods/60d6da9e-f759-4557-bc7a-9e73d53728a2/volumes" Sep 30 00:38:43 crc kubenswrapper[4809]: I0930 00:38:43.703832 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f8f7eac-270c-47bc-bfbf-acd03e83bdf6" path="/var/lib/kubelet/pods/7f8f7eac-270c-47bc-bfbf-acd03e83bdf6/volumes" Sep 30 00:38:43 crc kubenswrapper[4809]: I0930 00:38:43.704494 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f3d710a-877b-4d2b-922a-83aba52d6e9c" path="/var/lib/kubelet/pods/8f3d710a-877b-4d2b-922a-83aba52d6e9c/volumes" Sep 30 00:38:43 crc kubenswrapper[4809]: I0930 00:38:43.705059 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d346f7a2-3667-4066-975e-9b834ddffcfd" path="/var/lib/kubelet/pods/d346f7a2-3667-4066-975e-9b834ddffcfd/volumes" Sep 30 00:38:45 crc kubenswrapper[4809]: I0930 00:38:45.031374 4809 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6fe4-account-create-g7zd6"] Sep 30 00:38:45 crc kubenswrapper[4809]: I0930 00:38:45.043743 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6fe4-account-create-g7zd6"] Sep 30 00:38:45 crc kubenswrapper[4809]: I0930 00:38:45.711347 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4" path="/var/lib/kubelet/pods/d88a00fe-c1c8-44b9-9efa-0c9503ad7ad4/volumes" Sep 30 00:38:46 crc kubenswrapper[4809]: I0930 00:38:46.049747 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-bd76-account-create-bfxh6"] Sep 30 00:38:46 crc kubenswrapper[4809]: I0930 00:38:46.068465 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-bd76-account-create-bfxh6"] Sep 30 00:38:46 crc kubenswrapper[4809]: I0930 00:38:46.083335 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-186e-account-create-lt9zr"] Sep 30 00:38:46 crc kubenswrapper[4809]: I0930 00:38:46.095709 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-186e-account-create-lt9zr"] Sep 30 00:38:47 crc kubenswrapper[4809]: I0930 00:38:47.033669 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-605e-account-create-27tv4"] Sep 30 00:38:47 crc kubenswrapper[4809]: I0930 00:38:47.045835 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-605e-account-create-27tv4"] Sep 30 00:38:47 crc kubenswrapper[4809]: I0930 00:38:47.706095 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22bc004f-362d-45a9-9643-b308efabecdf" path="/var/lib/kubelet/pods/22bc004f-362d-45a9-9643-b308efabecdf/volumes" Sep 30 00:38:47 crc kubenswrapper[4809]: I0930 00:38:47.708165 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94631e9e-696e-4dcb-b06e-386d66459e6f" path="/var/lib/kubelet/pods/94631e9e-696e-4dcb-b06e-386d66459e6f/volumes" Sep 30 00:38:47 crc kubenswrapper[4809]: I0930 00:38:47.709734 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd07d831-7a41-45a6-a4a5-abd8b40528d4" path="/var/lib/kubelet/pods/fd07d831-7a41-45a6-a4a5-abd8b40528d4/volumes" Sep 30 00:38:53 crc kubenswrapper[4809]: I0930 00:38:53.692715 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:38:53 crc kubenswrapper[4809]: E0930 00:38:53.694616 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.032873 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-c98f-account-create-b2zgz"] Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.042549 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-f22f-account-create-ctshj"] Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.052016 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-491f-account-create-fwgpd"] Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.060277 4809 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-f22f-account-create-ctshj"] Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.069072 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-491f-account-create-fwgpd"] Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.077498 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-c98f-account-create-b2zgz"] Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.702173 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c601fe-b3c3-40ee-bf5d-6f15b4535a22" path="/var/lib/kubelet/pods/54c601fe-b3c3-40ee-bf5d-6f15b4535a22/volumes" Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.702760 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70c3a309-0bd5-4672-bfeb-876af5a894ca" path="/var/lib/kubelet/pods/70c3a309-0bd5-4672-bfeb-876af5a894ca/volumes" Sep 30 00:38:55 crc kubenswrapper[4809]: I0930 00:38:55.703306 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb21eb26-8a42-4a7c-ace5-392c2284e0e5" path="/var/lib/kubelet/pods/eb21eb26-8a42-4a7c-ace5-392c2284e0e5/volumes" Sep 30 00:39:07 crc kubenswrapper[4809]: I0930 00:39:07.039551 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-97da-account-create-qnblg"] Sep 30 00:39:07 crc kubenswrapper[4809]: I0930 00:39:07.048547 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-97da-account-create-qnblg"] Sep 30 00:39:07 crc kubenswrapper[4809]: I0930 00:39:07.691337 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:39:07 crc kubenswrapper[4809]: E0930 00:39:07.692026 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:39:07 crc kubenswrapper[4809]: I0930 00:39:07.705281 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="038807b6-5f36-4116-a895-c64ad207d87d" path="/var/lib/kubelet/pods/038807b6-5f36-4116-a895-c64ad207d87d/volumes" Sep 30 00:39:12 crc kubenswrapper[4809]: I0930 00:39:12.661173 4809 scope.go:117] "RemoveContainer" containerID="9a1dadb829781ef631ddbd69538f1611e0e68892d41a28d305f4b9d010d10a3f" Sep 30 00:39:12 crc kubenswrapper[4809]: I0930 00:39:12.695364 4809 scope.go:117] "RemoveContainer" containerID="ae20599982105d8cfe303eca7195a3aa9cccd6c8d9fcd4fff628924600d2caca" Sep 30 00:39:12 crc kubenswrapper[4809]: I0930 00:39:12.753806 4809 scope.go:117] "RemoveContainer" containerID="1cc8266f7afe5c3cb62635feaa024dd9bb9c72918aafa1e92e52dd4ff52d7b69" Sep 30 00:39:12 crc kubenswrapper[4809]: I0930 00:39:12.817564 4809 scope.go:117] "RemoveContainer" containerID="a9b9df5168f52c550629419bd8bccb7fe9c40af7e04940ddf6a1d377bed54178" Sep 30 00:39:12 crc kubenswrapper[4809]: I0930 00:39:12.862439 4809 scope.go:117] "RemoveContainer" containerID="069ff1e20ae1561864452edfba68e7df09eb517dbf68daa25bc931209c6d5a6b" Sep 30 00:39:12 crc kubenswrapper[4809]: I0930 00:39:12.939702 4809 scope.go:117] "RemoveContainer" containerID="dcbccc737f88305cdf0ae39783f1bd8027b34897b90d4e1c42687a75d578a31a" Sep 30 00:39:12 crc 
kubenswrapper[4809]: I0930 00:39:12.999334 4809 scope.go:117] "RemoveContainer" containerID="4237027638fb1fefdd8550cadce45021c733f8de87bf6aa518a79e25262558a9" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.025578 4809 scope.go:117] "RemoveContainer" containerID="071c09296afcb6d02c440174eac027529130993f644e202748d64549a9f113fa" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.048432 4809 scope.go:117] "RemoveContainer" containerID="70b1eb2878e69129416b39fe579c3dd63925765bed63d5c8e87523c3e9be342b" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.072889 4809 scope.go:117] "RemoveContainer" containerID="7594a4bebcf6878b7b71dc4cead1eb2cadf8d93eb563c2f0b74ef1d47d2bec31" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.097620 4809 scope.go:117] "RemoveContainer" containerID="47124f3ee9666dc743709b2ae189206e5cdf8cdffcc0ec8f9dbcbb7d9e3866c5" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.125396 4809 scope.go:117] "RemoveContainer" containerID="946b7d706704df502abc7ba17547fd033dd5262b5588dfbd2a1e1eb3a71cd424" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.148385 4809 scope.go:117] "RemoveContainer" containerID="fc85fbb277cd26a0f8bd99a6b7877dc8bca70ef4b64e3b25ffd83e0f294578ef" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.169926 4809 scope.go:117] "RemoveContainer" containerID="e276e0c737f10eb66070179e438def9a33f07a875e417d82ec926bf59ab6d08a" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.193281 4809 scope.go:117] "RemoveContainer" containerID="daf647f2ec2c8eabfa7ad9847e1fd9cd610f40dd7afd9c20ec38c946d0659c1a" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.227598 4809 scope.go:117] "RemoveContainer" containerID="631225e9da2ec062bc44ad4fbf1975da64f2e0c2375d40b684dd7299e2e38a41" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.258791 4809 scope.go:117] "RemoveContainer" containerID="2c0fce65679830ef0ff486d42dabedd6eaad41e4b2f4bc33f3c141b3046dceda" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.285349 4809 scope.go:117] "RemoveContainer" containerID="ec0fe56f1b96f69fbf3f827f6a34e8d54debc382f7ea8c3dd3e1cbb208fd5e1e" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.309036 4809 scope.go:117] "RemoveContainer" containerID="864eef583f9e50939f1797ea0cdfa2310fe30f4fc2e6e3e476dca8519fdbf12f" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.362220 4809 scope.go:117] "RemoveContainer" containerID="e23b2903c81e7379fc434415b27baf1ee2cf049a645a0dd5770776ef817dc25d" Sep 30 00:39:13 crc kubenswrapper[4809]: I0930 00:39:13.402100 4809 scope.go:117] "RemoveContainer" containerID="7bcc1a0ad45e219b1362f9dd13065a578aa2d772cbc8a2825f4be2184868d92d" Sep 30 00:39:22 crc kubenswrapper[4809]: I0930 00:39:22.030057 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-j2xxj"] Sep 30 00:39:22 crc kubenswrapper[4809]: I0930 00:39:22.038081 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-j2xxj"] Sep 30 00:39:22 crc kubenswrapper[4809]: I0930 00:39:22.691168 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:39:22 crc kubenswrapper[4809]: E0930 00:39:22.691493 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:39:23 crc kubenswrapper[4809]: I0930 00:39:23.701340 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e71457ea-c5a1-4a8b-8524-68181a838ffd" path="/var/lib/kubelet/pods/e71457ea-c5a1-4a8b-8524-68181a838ffd/volumes" Sep 30 00:39:24 crc kubenswrapper[4809]: I0930 00:39:24.042569 4809 generic.go:334] "Generic (PLEG): container finished" podID="9a35eaeb-456c-42d9-bd9a-9eb736876d56" containerID="fecb52858e5c57ce2303dab098f2883d803e3006fbc4b6c7e27cefafe6d44169" exitCode=0 Sep 30 00:39:24 crc kubenswrapper[4809]: I0930 00:39:24.042635 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" event={"ID":"9a35eaeb-456c-42d9-bd9a-9eb736876d56","Type":"ContainerDied","Data":"fecb52858e5c57ce2303dab098f2883d803e3006fbc4b6c7e27cefafe6d44169"} Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.561687 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.696903 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkhrl\" (UniqueName: \"kubernetes.io/projected/9a35eaeb-456c-42d9-bd9a-9eb736876d56-kube-api-access-nkhrl\") pod \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.697018 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-ssh-key\") pod \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.697218 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-inventory\") pod \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\" (UID: \"9a35eaeb-456c-42d9-bd9a-9eb736876d56\") " Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.704406 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a35eaeb-456c-42d9-bd9a-9eb736876d56-kube-api-access-nkhrl" (OuterVolumeSpecName: "kube-api-access-nkhrl") pod "9a35eaeb-456c-42d9-bd9a-9eb736876d56" (UID: "9a35eaeb-456c-42d9-bd9a-9eb736876d56"). InnerVolumeSpecName "kube-api-access-nkhrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.730813 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-inventory" (OuterVolumeSpecName: "inventory") pod "9a35eaeb-456c-42d9-bd9a-9eb736876d56" (UID: "9a35eaeb-456c-42d9-bd9a-9eb736876d56"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.748312 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9a35eaeb-456c-42d9-bd9a-9eb736876d56" (UID: "9a35eaeb-456c-42d9-bd9a-9eb736876d56"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.799388 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkhrl\" (UniqueName: \"kubernetes.io/projected/9a35eaeb-456c-42d9-bd9a-9eb736876d56-kube-api-access-nkhrl\") on node \"crc\" DevicePath \"\"" Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.799416 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:39:25 crc kubenswrapper[4809]: I0930 00:39:25.799427 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a35eaeb-456c-42d9-bd9a-9eb736876d56-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.064285 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" event={"ID":"9a35eaeb-456c-42d9-bd9a-9eb736876d56","Type":"ContainerDied","Data":"72b9224d0e036ee978af69ab7191867c91c0b86dafc48b9c91b57713c09739c7"} Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.064345 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.064661 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72b9224d0e036ee978af69ab7191867c91c0b86dafc48b9c91b57713c09739c7" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.181090 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d"] Sep 30 00:39:26 crc kubenswrapper[4809]: E0930 00:39:26.181518 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a35eaeb-456c-42d9-bd9a-9eb736876d56" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.181538 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a35eaeb-456c-42d9-bd9a-9eb736876d56" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.181804 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a35eaeb-456c-42d9-bd9a-9eb736876d56" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.182532 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.185283 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.185385 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.185533 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.192344 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.193874 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d"] Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.309157 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.309464 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.309528 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbbxv\" (UniqueName: \"kubernetes.io/projected/2cdfd6c1-789a-484b-b8bc-d225b4690da5-kube-api-access-sbbxv\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.411499 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.411782 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.411836 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbbxv\" (UniqueName: \"kubernetes.io/projected/2cdfd6c1-789a-484b-b8bc-d225b4690da5-kube-api-access-sbbxv\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.415424 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.415563 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.433785 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbbxv\" (UniqueName: \"kubernetes.io/projected/2cdfd6c1-789a-484b-b8bc-d225b4690da5-kube-api-access-sbbxv\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:26 crc kubenswrapper[4809]: I0930 00:39:26.501237 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:27 crc kubenswrapper[4809]: I0930 00:39:27.092633 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d"] Sep 30 00:39:27 crc kubenswrapper[4809]: W0930 00:39:27.100058 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cdfd6c1_789a_484b_b8bc_d225b4690da5.slice/crio-c0ec642ef419fe78116cb50cee08736eff291e7f649d9814aa1c42034fba126a WatchSource:0}: Error finding container c0ec642ef419fe78116cb50cee08736eff291e7f649d9814aa1c42034fba126a: Status 404 returned error can't find the container with id c0ec642ef419fe78116cb50cee08736eff291e7f649d9814aa1c42034fba126a Sep 30 00:39:28 crc kubenswrapper[4809]: I0930 00:39:28.099263 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" event={"ID":"2cdfd6c1-789a-484b-b8bc-d225b4690da5","Type":"ContainerStarted","Data":"bc7fd630d77d2aeb9260ba4e6b148b9d2cf0634d4063227e474ebf8152cf95b5"} Sep 30 00:39:28 crc kubenswrapper[4809]: I0930 00:39:28.099595 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" event={"ID":"2cdfd6c1-789a-484b-b8bc-d225b4690da5","Type":"ContainerStarted","Data":"c0ec642ef419fe78116cb50cee08736eff291e7f649d9814aa1c42034fba126a"} Sep 30 00:39:28 crc kubenswrapper[4809]: I0930 00:39:28.115465 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" podStartSLOduration=1.408116136 podStartE2EDuration="2.115450197s" podCreationTimestamp="2025-09-30 00:39:26 +0000 UTC" firstStartedPulling="2025-09-30 00:39:27.105732896 +0000 UTC m=+1818.141982334" 
lastFinishedPulling="2025-09-30 00:39:27.813066987 +0000 UTC m=+1818.849316395" observedRunningTime="2025-09-30 00:39:28.115390876 +0000 UTC m=+1819.151640304" watchObservedRunningTime="2025-09-30 00:39:28.115450197 +0000 UTC m=+1819.151699605" Sep 30 00:39:33 crc kubenswrapper[4809]: I0930 00:39:33.164797 4809 generic.go:334] "Generic (PLEG): container finished" podID="2cdfd6c1-789a-484b-b8bc-d225b4690da5" containerID="bc7fd630d77d2aeb9260ba4e6b148b9d2cf0634d4063227e474ebf8152cf95b5" exitCode=0 Sep 30 00:39:33 crc kubenswrapper[4809]: I0930 00:39:33.165314 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" event={"ID":"2cdfd6c1-789a-484b-b8bc-d225b4690da5","Type":"ContainerDied","Data":"bc7fd630d77d2aeb9260ba4e6b148b9d2cf0634d4063227e474ebf8152cf95b5"} Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.655278 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.701746 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-inventory\") pod \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.701854 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbbxv\" (UniqueName: \"kubernetes.io/projected/2cdfd6c1-789a-484b-b8bc-d225b4690da5-kube-api-access-sbbxv\") pod \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.701885 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-ssh-key\") pod \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\" (UID: \"2cdfd6c1-789a-484b-b8bc-d225b4690da5\") " Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.713240 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cdfd6c1-789a-484b-b8bc-d225b4690da5-kube-api-access-sbbxv" (OuterVolumeSpecName: "kube-api-access-sbbxv") pod "2cdfd6c1-789a-484b-b8bc-d225b4690da5" (UID: "2cdfd6c1-789a-484b-b8bc-d225b4690da5"). InnerVolumeSpecName "kube-api-access-sbbxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.731862 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2cdfd6c1-789a-484b-b8bc-d225b4690da5" (UID: "2cdfd6c1-789a-484b-b8bc-d225b4690da5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.744598 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-inventory" (OuterVolumeSpecName: "inventory") pod "2cdfd6c1-789a-484b-b8bc-d225b4690da5" (UID: "2cdfd6c1-789a-484b-b8bc-d225b4690da5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.807010 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.807037 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbbxv\" (UniqueName: \"kubernetes.io/projected/2cdfd6c1-789a-484b-b8bc-d225b4690da5-kube-api-access-sbbxv\") on node \"crc\" DevicePath \"\"" Sep 30 00:39:34 crc kubenswrapper[4809]: I0930 00:39:34.807047 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cdfd6c1-789a-484b-b8bc-d225b4690da5-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.038347 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-lnq6w"] Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.048107 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-lnq6w"] Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.187876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" event={"ID":"2cdfd6c1-789a-484b-b8bc-d225b4690da5","Type":"ContainerDied","Data":"c0ec642ef419fe78116cb50cee08736eff291e7f649d9814aa1c42034fba126a"} Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.187953 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0ec642ef419fe78116cb50cee08736eff291e7f649d9814aa1c42034fba126a" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.188047 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.259952 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd"] Sep 30 00:39:35 crc kubenswrapper[4809]: E0930 00:39:35.260388 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cdfd6c1-789a-484b-b8bc-d225b4690da5" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.260404 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cdfd6c1-789a-484b-b8bc-d225b4690da5" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.260667 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cdfd6c1-789a-484b-b8bc-d225b4690da5" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.261409 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.263445 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.268609 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.268673 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.268820 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.276894 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd"] Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.315848 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.315904 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2spdp\" (UniqueName: \"kubernetes.io/projected/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-kube-api-access-2spdp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.315937 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.418239 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.418283 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2spdp\" (UniqueName: \"kubernetes.io/projected/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-kube-api-access-2spdp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.418316 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: 
\"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.421923 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.424013 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.436214 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2spdp\" (UniqueName: \"kubernetes.io/projected/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-kube-api-access-2spdp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-chbvd\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.586401 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.695232 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:39:35 crc kubenswrapper[4809]: E0930 00:39:35.695452 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:39:35 crc kubenswrapper[4809]: I0930 00:39:35.732975 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3c04595-d3a8-4477-9daf-46b43a8750dd" path="/var/lib/kubelet/pods/e3c04595-d3a8-4477-9daf-46b43a8750dd/volumes" Sep 30 00:39:36 crc kubenswrapper[4809]: I0930 00:39:36.156485 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd"] Sep 30 00:39:36 crc kubenswrapper[4809]: I0930 00:39:36.160698 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:39:36 crc kubenswrapper[4809]: I0930 00:39:36.209602 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" event={"ID":"9dc1b815-383d-4f16-a8db-cfa3e13a66f4","Type":"ContainerStarted","Data":"e1005e2b26e7b8a020bf90d0feacf407563855fbd00a65d0904735920cdc4466"} Sep 30 00:39:37 crc kubenswrapper[4809]: I0930 00:39:37.220933 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" event={"ID":"9dc1b815-383d-4f16-a8db-cfa3e13a66f4","Type":"ContainerStarted","Data":"5bc24f5c5300e4b59c45fd3f4d2d439c80626e9b210eb608bbc2a7be694a1acf"} Sep 30 
00:39:37 crc kubenswrapper[4809]: I0930 00:39:37.237192 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" podStartSLOduration=1.636546885 podStartE2EDuration="2.23717574s" podCreationTimestamp="2025-09-30 00:39:35 +0000 UTC" firstStartedPulling="2025-09-30 00:39:36.158658195 +0000 UTC m=+1827.194907603" lastFinishedPulling="2025-09-30 00:39:36.75928705 +0000 UTC m=+1827.795536458" observedRunningTime="2025-09-30 00:39:37.234893777 +0000 UTC m=+1828.271143195" watchObservedRunningTime="2025-09-30 00:39:37.23717574 +0000 UTC m=+1828.273425138" Sep 30 00:39:50 crc kubenswrapper[4809]: I0930 00:39:50.046123 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-j7xdb"] Sep 30 00:39:50 crc kubenswrapper[4809]: I0930 00:39:50.060030 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-j7xdb"] Sep 30 00:39:50 crc kubenswrapper[4809]: I0930 00:39:50.691592 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:39:50 crc kubenswrapper[4809]: E0930 00:39:50.691880 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:39:51 crc kubenswrapper[4809]: I0930 00:39:51.707859 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0424380-2493-4499-8314-274a6af06aa4" path="/var/lib/kubelet/pods/a0424380-2493-4499-8314-274a6af06aa4/volumes" Sep 30 00:39:58 crc kubenswrapper[4809]: I0930 00:39:58.039494 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-q8nkh"] Sep 30 00:39:58 crc kubenswrapper[4809]: I0930 00:39:58.062186 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-q8nkh"] Sep 30 00:39:59 crc kubenswrapper[4809]: I0930 00:39:59.704134 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e2cc1eb-6303-4607-a467-d21cc3c5d25d" path="/var/lib/kubelet/pods/2e2cc1eb-6303-4607-a467-d21cc3c5d25d/volumes" Sep 30 00:40:01 crc kubenswrapper[4809]: I0930 00:40:01.047824 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-v6m7r"] Sep 30 00:40:01 crc kubenswrapper[4809]: I0930 00:40:01.060305 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-v6m7r"] Sep 30 00:40:01 crc kubenswrapper[4809]: I0930 00:40:01.708803 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="956e79d1-b7da-4b76-8ba6-21dd3838aff1" path="/var/lib/kubelet/pods/956e79d1-b7da-4b76-8ba6-21dd3838aff1/volumes" Sep 30 00:40:05 crc kubenswrapper[4809]: I0930 00:40:05.691589 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:40:06 crc kubenswrapper[4809]: I0930 00:40:06.580984 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"13902e7754cc13427dac3722dc2f321035defa3ee2cdd26c1b7de958ff393f88"} Sep 30 00:40:09 crc 
kubenswrapper[4809]: I0930 00:40:09.032460 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jkrgs"] Sep 30 00:40:09 crc kubenswrapper[4809]: I0930 00:40:09.046367 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jkrgs"] Sep 30 00:40:09 crc kubenswrapper[4809]: I0930 00:40:09.705597 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1ccba93-c72a-4aea-b972-c54bbe6bfc99" path="/var/lib/kubelet/pods/a1ccba93-c72a-4aea-b972-c54bbe6bfc99/volumes" Sep 30 00:40:13 crc kubenswrapper[4809]: I0930 00:40:13.814092 4809 scope.go:117] "RemoveContainer" containerID="56c82cab30fc23b9821f8a17e4b7e7236f69ca46bd2e318ca796316377904af0" Sep 30 00:40:13 crc kubenswrapper[4809]: I0930 00:40:13.854230 4809 scope.go:117] "RemoveContainer" containerID="4f5792ddd193c4566d9b946b885da6715b32632d7a3d069d8dc77fa7506830d4" Sep 30 00:40:13 crc kubenswrapper[4809]: I0930 00:40:13.912155 4809 scope.go:117] "RemoveContainer" containerID="613fad92757ab6cf194a11100d250e3170421fe74f47b479543d4b633584a052" Sep 30 00:40:13 crc kubenswrapper[4809]: I0930 00:40:13.964566 4809 scope.go:117] "RemoveContainer" containerID="c4b4d496c7b5fc826a057c826cfaf62782016b670bf896f7ae51b9871331c63b" Sep 30 00:40:14 crc kubenswrapper[4809]: I0930 00:40:14.031669 4809 scope.go:117] "RemoveContainer" containerID="8d419f218018f9f557437322cf15156ebe4c2a4248368f6646bd9fb37412346f" Sep 30 00:40:14 crc kubenswrapper[4809]: I0930 00:40:14.082424 4809 scope.go:117] "RemoveContainer" containerID="00e21decf5cad846ef0c19e1ed9c06bb4fe8db15585a3cb67a239628dd8195f6" Sep 30 00:40:17 crc kubenswrapper[4809]: I0930 00:40:17.741045 4809 generic.go:334] "Generic (PLEG): container finished" podID="9dc1b815-383d-4f16-a8db-cfa3e13a66f4" containerID="5bc24f5c5300e4b59c45fd3f4d2d439c80626e9b210eb608bbc2a7be694a1acf" exitCode=0 Sep 30 00:40:17 crc kubenswrapper[4809]: I0930 00:40:17.741183 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" event={"ID":"9dc1b815-383d-4f16-a8db-cfa3e13a66f4","Type":"ContainerDied","Data":"5bc24f5c5300e4b59c45fd3f4d2d439c80626e9b210eb608bbc2a7be694a1acf"} Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.036784 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-qwzxp"] Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.048441 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-qwzxp"] Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.244297 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.329331 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2spdp\" (UniqueName: \"kubernetes.io/projected/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-kube-api-access-2spdp\") pod \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.329476 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-ssh-key\") pod \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.329695 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-inventory\") pod \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\" (UID: \"9dc1b815-383d-4f16-a8db-cfa3e13a66f4\") " Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.335899 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-kube-api-access-2spdp" (OuterVolumeSpecName: "kube-api-access-2spdp") pod "9dc1b815-383d-4f16-a8db-cfa3e13a66f4" (UID: "9dc1b815-383d-4f16-a8db-cfa3e13a66f4"). InnerVolumeSpecName "kube-api-access-2spdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.359378 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9dc1b815-383d-4f16-a8db-cfa3e13a66f4" (UID: "9dc1b815-383d-4f16-a8db-cfa3e13a66f4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.383118 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-inventory" (OuterVolumeSpecName: "inventory") pod "9dc1b815-383d-4f16-a8db-cfa3e13a66f4" (UID: "9dc1b815-383d-4f16-a8db-cfa3e13a66f4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.431968 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.432000 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2spdp\" (UniqueName: \"kubernetes.io/projected/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-kube-api-access-2spdp\") on node \"crc\" DevicePath \"\"" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.432012 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc1b815-383d-4f16-a8db-cfa3e13a66f4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.716317 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f26e19e3-0ece-4f4d-aa5a-016fbfd929a5" path="/var/lib/kubelet/pods/f26e19e3-0ece-4f4d-aa5a-016fbfd929a5/volumes" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.763229 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" event={"ID":"9dc1b815-383d-4f16-a8db-cfa3e13a66f4","Type":"ContainerDied","Data":"e1005e2b26e7b8a020bf90d0feacf407563855fbd00a65d0904735920cdc4466"} Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.763280 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1005e2b26e7b8a020bf90d0feacf407563855fbd00a65d0904735920cdc4466" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.763285 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.862551 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4"] Sep 30 00:40:19 crc kubenswrapper[4809]: E0930 00:40:19.863156 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dc1b815-383d-4f16-a8db-cfa3e13a66f4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.863177 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dc1b815-383d-4f16-a8db-cfa3e13a66f4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.863438 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dc1b815-383d-4f16-a8db-cfa3e13a66f4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.864344 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.866956 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.867180 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.867327 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.867533 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:40:19 crc kubenswrapper[4809]: I0930 00:40:19.876118 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4"] Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.043053 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.043837 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p54t\" (UniqueName: \"kubernetes.io/projected/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-kube-api-access-8p54t\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.044024 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.146465 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.146623 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p54t\" (UniqueName: \"kubernetes.io/projected/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-kube-api-access-8p54t\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.146740 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" 
(UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.152779 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.153500 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.169138 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p54t\" (UniqueName: \"kubernetes.io/projected/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-kube-api-access-8p54t\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.196525 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:20 crc kubenswrapper[4809]: W0930 00:40:20.757432 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b62f711_5eaf_4f0c_b2aa_e29f893a1871.slice/crio-5ea2a909db95a8dd9806ffd58c4508d7cdb3e8346256894ee0b0115be5d4cd0e WatchSource:0}: Error finding container 5ea2a909db95a8dd9806ffd58c4508d7cdb3e8346256894ee0b0115be5d4cd0e: Status 404 returned error can't find the container with id 5ea2a909db95a8dd9806ffd58c4508d7cdb3e8346256894ee0b0115be5d4cd0e Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.758028 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4"] Sep 30 00:40:20 crc kubenswrapper[4809]: I0930 00:40:20.773286 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" event={"ID":"1b62f711-5eaf-4f0c-b2aa-e29f893a1871","Type":"ContainerStarted","Data":"5ea2a909db95a8dd9806ffd58c4508d7cdb3e8346256894ee0b0115be5d4cd0e"} Sep 30 00:40:21 crc kubenswrapper[4809]: I0930 00:40:21.786876 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" event={"ID":"1b62f711-5eaf-4f0c-b2aa-e29f893a1871","Type":"ContainerStarted","Data":"15127d28123495dec6101b7b7d5b58f030c05076140c43a088ea442fb1f2d814"} Sep 30 00:40:21 crc kubenswrapper[4809]: I0930 00:40:21.808516 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" podStartSLOduration=2.279431928 podStartE2EDuration="2.808499111s" podCreationTimestamp="2025-09-30 00:40:19 +0000 UTC" firstStartedPulling="2025-09-30 00:40:20.760462653 +0000 UTC m=+1871.796712061" lastFinishedPulling="2025-09-30 00:40:21.289529826 +0000 UTC m=+1872.325779244" observedRunningTime="2025-09-30 
00:40:21.806965778 +0000 UTC m=+1872.843215186" watchObservedRunningTime="2025-09-30 00:40:21.808499111 +0000 UTC m=+1872.844748519" Sep 30 00:40:26 crc kubenswrapper[4809]: I0930 00:40:26.842277 4809 generic.go:334] "Generic (PLEG): container finished" podID="1b62f711-5eaf-4f0c-b2aa-e29f893a1871" containerID="15127d28123495dec6101b7b7d5b58f030c05076140c43a088ea442fb1f2d814" exitCode=0 Sep 30 00:40:26 crc kubenswrapper[4809]: I0930 00:40:26.842799 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" event={"ID":"1b62f711-5eaf-4f0c-b2aa-e29f893a1871","Type":"ContainerDied","Data":"15127d28123495dec6101b7b7d5b58f030c05076140c43a088ea442fb1f2d814"} Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.265011 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.423196 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p54t\" (UniqueName: \"kubernetes.io/projected/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-kube-api-access-8p54t\") pod \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.423292 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-inventory\") pod \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.423396 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-ssh-key\") pod \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\" (UID: \"1b62f711-5eaf-4f0c-b2aa-e29f893a1871\") " Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.429122 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-kube-api-access-8p54t" (OuterVolumeSpecName: "kube-api-access-8p54t") pod "1b62f711-5eaf-4f0c-b2aa-e29f893a1871" (UID: "1b62f711-5eaf-4f0c-b2aa-e29f893a1871"). InnerVolumeSpecName "kube-api-access-8p54t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.458800 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-inventory" (OuterVolumeSpecName: "inventory") pod "1b62f711-5eaf-4f0c-b2aa-e29f893a1871" (UID: "1b62f711-5eaf-4f0c-b2aa-e29f893a1871"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.463047 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1b62f711-5eaf-4f0c-b2aa-e29f893a1871" (UID: "1b62f711-5eaf-4f0c-b2aa-e29f893a1871"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.525469 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8p54t\" (UniqueName: \"kubernetes.io/projected/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-kube-api-access-8p54t\") on node \"crc\" DevicePath \"\"" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.525505 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.525514 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b62f711-5eaf-4f0c-b2aa-e29f893a1871-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.874896 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" event={"ID":"1b62f711-5eaf-4f0c-b2aa-e29f893a1871","Type":"ContainerDied","Data":"5ea2a909db95a8dd9806ffd58c4508d7cdb3e8346256894ee0b0115be5d4cd0e"} Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.874965 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ea2a909db95a8dd9806ffd58c4508d7cdb3e8346256894ee0b0115be5d4cd0e" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.875057 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.974908 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt"] Sep 30 00:40:28 crc kubenswrapper[4809]: E0930 00:40:28.975502 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b62f711-5eaf-4f0c-b2aa-e29f893a1871" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.975526 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b62f711-5eaf-4f0c-b2aa-e29f893a1871" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.975914 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b62f711-5eaf-4f0c-b2aa-e29f893a1871" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.976906 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.980174 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.980376 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.980671 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.980965 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:40:28 crc kubenswrapper[4809]: I0930 00:40:28.991898 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt"] Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.138001 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.138066 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fh78\" (UniqueName: \"kubernetes.io/projected/740f4755-38d2-4150-85b8-e69d10d305b8-kube-api-access-5fh78\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.138165 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.239659 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.239955 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fh78\" (UniqueName: \"kubernetes.io/projected/740f4755-38d2-4150-85b8-e69d10d305b8-kube-api-access-5fh78\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.240011 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" 
(UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.243548 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.244278 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.279077 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fh78\" (UniqueName: \"kubernetes.io/projected/740f4755-38d2-4150-85b8-e69d10d305b8-kube-api-access-5fh78\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jzklt\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.305465 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.856977 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt"] Sep 30 00:40:29 crc kubenswrapper[4809]: I0930 00:40:29.885413 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" event={"ID":"740f4755-38d2-4150-85b8-e69d10d305b8","Type":"ContainerStarted","Data":"3a18b6443d7601c993e704f81f6cdd1f5e875ac3b905640c0de9c3edfab37057"} Sep 30 00:40:30 crc kubenswrapper[4809]: I0930 00:40:30.895107 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" event={"ID":"740f4755-38d2-4150-85b8-e69d10d305b8","Type":"ContainerStarted","Data":"3c44ea3aa91a201d9057fb4073c6d344f35e4a3701d9a409bdf256f3dc935712"} Sep 30 00:40:30 crc kubenswrapper[4809]: I0930 00:40:30.922917 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" podStartSLOduration=2.406265481 podStartE2EDuration="2.92289819s" podCreationTimestamp="2025-09-30 00:40:28 +0000 UTC" firstStartedPulling="2025-09-30 00:40:29.861607863 +0000 UTC m=+1880.897857301" lastFinishedPulling="2025-09-30 00:40:30.378240592 +0000 UTC m=+1881.414490010" observedRunningTime="2025-09-30 00:40:30.915428903 +0000 UTC m=+1881.951678331" watchObservedRunningTime="2025-09-30 00:40:30.92289819 +0000 UTC m=+1881.959147598" Sep 30 00:40:58 crc kubenswrapper[4809]: I0930 00:40:58.041064 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-bh5sc"] Sep 30 00:40:58 crc kubenswrapper[4809]: I0930 00:40:58.049390 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-bh5sc"] Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.049272 4809 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-cell1-db-create-96wlx"] Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.062154 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-pkq57"] Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.072473 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-96wlx"] Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.080943 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-pkq57"] Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.725946 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8de17215-f27c-4eab-a6f5-0ad924367b11" path="/var/lib/kubelet/pods/8de17215-f27c-4eab-a6f5-0ad924367b11/volumes" Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.727234 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9152f576-abed-4cb6-97ee-3a4efe2ba97f" path="/var/lib/kubelet/pods/9152f576-abed-4cb6-97ee-3a4efe2ba97f/volumes" Sep 30 00:40:59 crc kubenswrapper[4809]: I0930 00:40:59.728399 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97460645-6cce-4b0f-b2a2-d80caee414cd" path="/var/lib/kubelet/pods/97460645-6cce-4b0f-b2a2-d80caee414cd/volumes" Sep 30 00:41:14 crc kubenswrapper[4809]: I0930 00:41:14.230821 4809 scope.go:117] "RemoveContainer" containerID="15bcf4a1213bb2857aaa2d46c105a7cf69375731605bc128731ea2b59f3e50f3" Sep 30 00:41:14 crc kubenswrapper[4809]: I0930 00:41:14.277602 4809 scope.go:117] "RemoveContainer" containerID="505d37050ffa3c58311c89ad872b639b21b9eaf4982d541a778c92bae41bbe55" Sep 30 00:41:14 crc kubenswrapper[4809]: I0930 00:41:14.333354 4809 scope.go:117] "RemoveContainer" containerID="57e5168e5b543a436549303cbf394fff2ab5613e0ba9beb7b161a1de79c59663" Sep 30 00:41:14 crc kubenswrapper[4809]: I0930 00:41:14.374134 4809 scope.go:117] "RemoveContainer" containerID="607a49a569b789b5f538ded9881802b35ad84bafd4c38d0c84fb4d0ed10f3fb9" Sep 30 00:41:16 crc kubenswrapper[4809]: I0930 00:41:16.039085 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-4218-account-create-m5tjr"] Sep 30 00:41:16 crc kubenswrapper[4809]: I0930 00:41:16.048589 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-4218-account-create-m5tjr"] Sep 30 00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.036848 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-3a35-account-create-r6tmr"] Sep 30 00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.048323 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-16f3-account-create-cklrw"] Sep 30 00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.059436 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-3a35-account-create-r6tmr"] Sep 30 00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.068894 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-16f3-account-create-cklrw"] Sep 30 00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.705198 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a7e33b7-b3ea-4290-9680-eacdad360a53" path="/var/lib/kubelet/pods/7a7e33b7-b3ea-4290-9680-eacdad360a53/volumes" Sep 30 00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.705807 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df13f8fe-c553-41f3-bcd6-625d07a7d2de" path="/var/lib/kubelet/pods/df13f8fe-c553-41f3-bcd6-625d07a7d2de/volumes" Sep 30 
00:41:17 crc kubenswrapper[4809]: I0930 00:41:17.706331 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc" path="/var/lib/kubelet/pods/ea156bcc-d8c5-4cff-a3d7-cfa69e7b80dc/volumes" Sep 30 00:41:26 crc kubenswrapper[4809]: I0930 00:41:26.544418 4809 generic.go:334] "Generic (PLEG): container finished" podID="740f4755-38d2-4150-85b8-e69d10d305b8" containerID="3c44ea3aa91a201d9057fb4073c6d344f35e4a3701d9a409bdf256f3dc935712" exitCode=0 Sep 30 00:41:26 crc kubenswrapper[4809]: I0930 00:41:26.544493 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" event={"ID":"740f4755-38d2-4150-85b8-e69d10d305b8","Type":"ContainerDied","Data":"3c44ea3aa91a201d9057fb4073c6d344f35e4a3701d9a409bdf256f3dc935712"} Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.021882 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.129748 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-ssh-key\") pod \"740f4755-38d2-4150-85b8-e69d10d305b8\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.130005 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-inventory\") pod \"740f4755-38d2-4150-85b8-e69d10d305b8\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.130182 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fh78\" (UniqueName: \"kubernetes.io/projected/740f4755-38d2-4150-85b8-e69d10d305b8-kube-api-access-5fh78\") pod \"740f4755-38d2-4150-85b8-e69d10d305b8\" (UID: \"740f4755-38d2-4150-85b8-e69d10d305b8\") " Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.135811 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/740f4755-38d2-4150-85b8-e69d10d305b8-kube-api-access-5fh78" (OuterVolumeSpecName: "kube-api-access-5fh78") pod "740f4755-38d2-4150-85b8-e69d10d305b8" (UID: "740f4755-38d2-4150-85b8-e69d10d305b8"). InnerVolumeSpecName "kube-api-access-5fh78". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.162145 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-inventory" (OuterVolumeSpecName: "inventory") pod "740f4755-38d2-4150-85b8-e69d10d305b8" (UID: "740f4755-38d2-4150-85b8-e69d10d305b8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.165400 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "740f4755-38d2-4150-85b8-e69d10d305b8" (UID: "740f4755-38d2-4150-85b8-e69d10d305b8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.246931 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fh78\" (UniqueName: \"kubernetes.io/projected/740f4755-38d2-4150-85b8-e69d10d305b8-kube-api-access-5fh78\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.246960 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.246968 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740f4755-38d2-4150-85b8-e69d10d305b8-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.568281 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" event={"ID":"740f4755-38d2-4150-85b8-e69d10d305b8","Type":"ContainerDied","Data":"3a18b6443d7601c993e704f81f6cdd1f5e875ac3b905640c0de9c3edfab37057"} Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.568324 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.568330 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a18b6443d7601c993e704f81f6cdd1f5e875ac3b905640c0de9c3edfab37057" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.666808 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k2gl9"] Sep 30 00:41:28 crc kubenswrapper[4809]: E0930 00:41:28.667291 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="740f4755-38d2-4150-85b8-e69d10d305b8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.667308 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="740f4755-38d2-4150-85b8-e69d10d305b8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.667507 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="740f4755-38d2-4150-85b8-e69d10d305b8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.668367 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.673870 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.674134 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.674186 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.674249 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.676490 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k2gl9"] Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.761766 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46znz\" (UniqueName: \"kubernetes.io/projected/fcf72aff-935b-4320-ac12-29bf554f224c-kube-api-access-46znz\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.761880 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.762015 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.863243 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46znz\" (UniqueName: \"kubernetes.io/projected/fcf72aff-935b-4320-ac12-29bf554f224c-kube-api-access-46znz\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.863365 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.863466 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc 
kubenswrapper[4809]: I0930 00:41:28.869676 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.870234 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:28 crc kubenswrapper[4809]: I0930 00:41:28.894504 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46znz\" (UniqueName: \"kubernetes.io/projected/fcf72aff-935b-4320-ac12-29bf554f224c-kube-api-access-46znz\") pod \"ssh-known-hosts-edpm-deployment-k2gl9\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:29 crc kubenswrapper[4809]: I0930 00:41:29.003220 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:29 crc kubenswrapper[4809]: I0930 00:41:29.590904 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k2gl9"] Sep 30 00:41:30 crc kubenswrapper[4809]: I0930 00:41:30.585278 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" event={"ID":"fcf72aff-935b-4320-ac12-29bf554f224c","Type":"ContainerStarted","Data":"7420898a0192d0360cf65771248df4bea6c34f22b6a815b03b528d65fbb5ecfc"} Sep 30 00:41:30 crc kubenswrapper[4809]: I0930 00:41:30.585972 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" event={"ID":"fcf72aff-935b-4320-ac12-29bf554f224c","Type":"ContainerStarted","Data":"36857d68965e89e1b56f4109d744d52bafae4980c782d3bca886b85596c40ba2"} Sep 30 00:41:30 crc kubenswrapper[4809]: I0930 00:41:30.608443 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" podStartSLOduration=1.90998286 podStartE2EDuration="2.608424635s" podCreationTimestamp="2025-09-30 00:41:28 +0000 UTC" firstStartedPulling="2025-09-30 00:41:29.601251283 +0000 UTC m=+1940.637500731" lastFinishedPulling="2025-09-30 00:41:30.299693098 +0000 UTC m=+1941.335942506" observedRunningTime="2025-09-30 00:41:30.597804359 +0000 UTC m=+1941.634053777" watchObservedRunningTime="2025-09-30 00:41:30.608424635 +0000 UTC m=+1941.644674053" Sep 30 00:41:38 crc kubenswrapper[4809]: I0930 00:41:38.671761 4809 generic.go:334] "Generic (PLEG): container finished" podID="fcf72aff-935b-4320-ac12-29bf554f224c" containerID="7420898a0192d0360cf65771248df4bea6c34f22b6a815b03b528d65fbb5ecfc" exitCode=0 Sep 30 00:41:38 crc kubenswrapper[4809]: I0930 00:41:38.671945 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" event={"ID":"fcf72aff-935b-4320-ac12-29bf554f224c","Type":"ContainerDied","Data":"7420898a0192d0360cf65771248df4bea6c34f22b6a815b03b528d65fbb5ecfc"} Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.216944 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.412828 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-ssh-key-openstack-edpm-ipam\") pod \"fcf72aff-935b-4320-ac12-29bf554f224c\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.412888 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46znz\" (UniqueName: \"kubernetes.io/projected/fcf72aff-935b-4320-ac12-29bf554f224c-kube-api-access-46znz\") pod \"fcf72aff-935b-4320-ac12-29bf554f224c\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.413122 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-inventory-0\") pod \"fcf72aff-935b-4320-ac12-29bf554f224c\" (UID: \"fcf72aff-935b-4320-ac12-29bf554f224c\") " Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.423135 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcf72aff-935b-4320-ac12-29bf554f224c-kube-api-access-46znz" (OuterVolumeSpecName: "kube-api-access-46znz") pod "fcf72aff-935b-4320-ac12-29bf554f224c" (UID: "fcf72aff-935b-4320-ac12-29bf554f224c"). InnerVolumeSpecName "kube-api-access-46znz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.449884 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "fcf72aff-935b-4320-ac12-29bf554f224c" (UID: "fcf72aff-935b-4320-ac12-29bf554f224c"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.456963 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "fcf72aff-935b-4320-ac12-29bf554f224c" (UID: "fcf72aff-935b-4320-ac12-29bf554f224c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.515703 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.515743 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46znz\" (UniqueName: \"kubernetes.io/projected/fcf72aff-935b-4320-ac12-29bf554f224c-kube-api-access-46znz\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.515761 4809 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fcf72aff-935b-4320-ac12-29bf554f224c-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.700137 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" event={"ID":"fcf72aff-935b-4320-ac12-29bf554f224c","Type":"ContainerDied","Data":"36857d68965e89e1b56f4109d744d52bafae4980c782d3bca886b85596c40ba2"} Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.700191 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36857d68965e89e1b56f4109d744d52bafae4980c782d3bca886b85596c40ba2" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.700254 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k2gl9" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.765318 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6"] Sep 30 00:41:40 crc kubenswrapper[4809]: E0930 00:41:40.769296 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf72aff-935b-4320-ac12-29bf554f224c" containerName="ssh-known-hosts-edpm-deployment" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.769333 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf72aff-935b-4320-ac12-29bf554f224c" containerName="ssh-known-hosts-edpm-deployment" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.769562 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf72aff-935b-4320-ac12-29bf554f224c" containerName="ssh-known-hosts-edpm-deployment" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.770341 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.773109 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.773432 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.773605 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.775447 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6"] Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.777369 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.924538 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9rww\" (UniqueName: \"kubernetes.io/projected/5e5f0c82-2239-41df-886e-be2d3b59bc85-kube-api-access-k9rww\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.924659 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:40 crc kubenswrapper[4809]: I0930 00:41:40.924716 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.026548 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.026601 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.026739 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9rww\" (UniqueName: \"kubernetes.io/projected/5e5f0c82-2239-41df-886e-be2d3b59bc85-kube-api-access-k9rww\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.030851 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.035413 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.042028 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9rww\" (UniqueName: \"kubernetes.io/projected/5e5f0c82-2239-41df-886e-be2d3b59bc85-kube-api-access-k9rww\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-jwhf6\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.090370 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:41 crc kubenswrapper[4809]: I0930 00:41:41.752232 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6"] Sep 30 00:41:42 crc kubenswrapper[4809]: I0930 00:41:42.052481 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9thmh"] Sep 30 00:41:42 crc kubenswrapper[4809]: I0930 00:41:42.067058 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9thmh"] Sep 30 00:41:42 crc kubenswrapper[4809]: I0930 00:41:42.722853 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" event={"ID":"5e5f0c82-2239-41df-886e-be2d3b59bc85","Type":"ContainerStarted","Data":"0f010df3a9ce12ba31a1f370b7ede95e26bec1000c067f81f9bbc30b4815d575"} Sep 30 00:41:43 crc kubenswrapper[4809]: I0930 00:41:43.703765 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81f791a-b60f-4d43-b645-47308146aac2" path="/var/lib/kubelet/pods/f81f791a-b60f-4d43-b645-47308146aac2/volumes" Sep 30 00:41:43 crc kubenswrapper[4809]: I0930 00:41:43.734465 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" event={"ID":"5e5f0c82-2239-41df-886e-be2d3b59bc85","Type":"ContainerStarted","Data":"4466f448d56626dba7c62709e609536817d5d00a117f2fcf32c701e0c81e0309"} Sep 30 00:41:43 crc kubenswrapper[4809]: I0930 00:41:43.761732 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" podStartSLOduration=3.025575599 podStartE2EDuration="3.761709373s" podCreationTimestamp="2025-09-30 00:41:40 +0000 UTC" firstStartedPulling="2025-09-30 00:41:41.757713028 +0000 UTC m=+1952.793962436" lastFinishedPulling="2025-09-30 00:41:42.493846802 +0000 UTC m=+1953.530096210" observedRunningTime="2025-09-30 00:41:43.75046955 +0000 UTC m=+1954.786718968" 
watchObservedRunningTime="2025-09-30 00:41:43.761709373 +0000 UTC m=+1954.797958791" Sep 30 00:41:47 crc kubenswrapper[4809]: I0930 00:41:47.031965 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-572xr"] Sep 30 00:41:47 crc kubenswrapper[4809]: I0930 00:41:47.044683 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-572xr"] Sep 30 00:41:47 crc kubenswrapper[4809]: I0930 00:41:47.704898 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="510b1218-ee5e-4ab3-a2a9-ade90df3466b" path="/var/lib/kubelet/pods/510b1218-ee5e-4ab3-a2a9-ade90df3466b/volumes" Sep 30 00:41:51 crc kubenswrapper[4809]: I0930 00:41:51.834446 4809 generic.go:334] "Generic (PLEG): container finished" podID="5e5f0c82-2239-41df-886e-be2d3b59bc85" containerID="4466f448d56626dba7c62709e609536817d5d00a117f2fcf32c701e0c81e0309" exitCode=0 Sep 30 00:41:51 crc kubenswrapper[4809]: I0930 00:41:51.834579 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" event={"ID":"5e5f0c82-2239-41df-886e-be2d3b59bc85","Type":"ContainerDied","Data":"4466f448d56626dba7c62709e609536817d5d00a117f2fcf32c701e0c81e0309"} Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.420973 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.525342 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-inventory\") pod \"5e5f0c82-2239-41df-886e-be2d3b59bc85\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.525384 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-ssh-key\") pod \"5e5f0c82-2239-41df-886e-be2d3b59bc85\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.525568 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9rww\" (UniqueName: \"kubernetes.io/projected/5e5f0c82-2239-41df-886e-be2d3b59bc85-kube-api-access-k9rww\") pod \"5e5f0c82-2239-41df-886e-be2d3b59bc85\" (UID: \"5e5f0c82-2239-41df-886e-be2d3b59bc85\") " Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.531234 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e5f0c82-2239-41df-886e-be2d3b59bc85-kube-api-access-k9rww" (OuterVolumeSpecName: "kube-api-access-k9rww") pod "5e5f0c82-2239-41df-886e-be2d3b59bc85" (UID: "5e5f0c82-2239-41df-886e-be2d3b59bc85"). InnerVolumeSpecName "kube-api-access-k9rww". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.558688 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5e5f0c82-2239-41df-886e-be2d3b59bc85" (UID: "5e5f0c82-2239-41df-886e-be2d3b59bc85"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.571013 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-inventory" (OuterVolumeSpecName: "inventory") pod "5e5f0c82-2239-41df-886e-be2d3b59bc85" (UID: "5e5f0c82-2239-41df-886e-be2d3b59bc85"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.628632 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.628732 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e5f0c82-2239-41df-886e-be2d3b59bc85-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.628759 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9rww\" (UniqueName: \"kubernetes.io/projected/5e5f0c82-2239-41df-886e-be2d3b59bc85-kube-api-access-k9rww\") on node \"crc\" DevicePath \"\"" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.861831 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" event={"ID":"5e5f0c82-2239-41df-886e-be2d3b59bc85","Type":"ContainerDied","Data":"0f010df3a9ce12ba31a1f370b7ede95e26bec1000c067f81f9bbc30b4815d575"} Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.861875 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f010df3a9ce12ba31a1f370b7ede95e26bec1000c067f81f9bbc30b4815d575" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.861945 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.937034 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2"] Sep 30 00:41:53 crc kubenswrapper[4809]: E0930 00:41:53.937597 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e5f0c82-2239-41df-886e-be2d3b59bc85" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.937618 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e5f0c82-2239-41df-886e-be2d3b59bc85" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.937873 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e5f0c82-2239-41df-886e-be2d3b59bc85" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.938787 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.940522 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.941394 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.941435 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.942255 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:41:53 crc kubenswrapper[4809]: I0930 00:41:53.951985 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2"] Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.037350 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.037397 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.037428 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjk54\" (UniqueName: \"kubernetes.io/projected/a96249d1-892d-4100-b032-c805fc0f9cbb-kube-api-access-xjk54\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.140024 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.140082 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.140120 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjk54\" (UniqueName: \"kubernetes.io/projected/a96249d1-892d-4100-b032-c805fc0f9cbb-kube-api-access-xjk54\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: 
\"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.143823 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.144493 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.157887 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjk54\" (UniqueName: \"kubernetes.io/projected/a96249d1-892d-4100-b032-c805fc0f9cbb-kube-api-access-xjk54\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.263393 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.809196 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2"] Sep 30 00:41:54 crc kubenswrapper[4809]: I0930 00:41:54.872548 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" event={"ID":"a96249d1-892d-4100-b032-c805fc0f9cbb","Type":"ContainerStarted","Data":"dbd66928eb6c2a9e10e52a041a8e5c91f634a72870e3f7386b1c8c57e3d97835"} Sep 30 00:41:55 crc kubenswrapper[4809]: I0930 00:41:55.886737 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" event={"ID":"a96249d1-892d-4100-b032-c805fc0f9cbb","Type":"ContainerStarted","Data":"242d74b4edafb24e91a8cf568650b46be4238da5dfceaef824bd712ca9e74a6d"} Sep 30 00:41:55 crc kubenswrapper[4809]: I0930 00:41:55.923347 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" podStartSLOduration=2.240856183 podStartE2EDuration="2.923318488s" podCreationTimestamp="2025-09-30 00:41:53 +0000 UTC" firstStartedPulling="2025-09-30 00:41:54.814603624 +0000 UTC m=+1965.850853052" lastFinishedPulling="2025-09-30 00:41:55.497065919 +0000 UTC m=+1966.533315357" observedRunningTime="2025-09-30 00:41:55.912043271 +0000 UTC m=+1966.948292689" watchObservedRunningTime="2025-09-30 00:41:55.923318488 +0000 UTC m=+1966.959567926" Sep 30 00:41:58 crc kubenswrapper[4809]: I0930 00:41:58.031555 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-f56b-account-create-wfnjk"] Sep 30 00:41:58 crc kubenswrapper[4809]: I0930 00:41:58.047540 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-f56b-account-create-wfnjk"] Sep 30 00:41:59 crc kubenswrapper[4809]: I0930 00:41:59.715379 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="76dd894e-8637-49ec-953e-6e5aa1ee4e21" path="/var/lib/kubelet/pods/76dd894e-8637-49ec-953e-6e5aa1ee4e21/volumes" Sep 30 00:42:05 crc kubenswrapper[4809]: I0930 00:42:05.043263 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-cvhd7"] Sep 30 00:42:05 crc kubenswrapper[4809]: I0930 00:42:05.054886 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-cvhd7"] Sep 30 00:42:05 crc kubenswrapper[4809]: I0930 00:42:05.706130 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14b3d277-31b4-4f12-b360-aeabe9420f33" path="/var/lib/kubelet/pods/14b3d277-31b4-4f12-b360-aeabe9420f33/volumes" Sep 30 00:42:06 crc kubenswrapper[4809]: I0930 00:42:06.002537 4809 generic.go:334] "Generic (PLEG): container finished" podID="a96249d1-892d-4100-b032-c805fc0f9cbb" containerID="242d74b4edafb24e91a8cf568650b46be4238da5dfceaef824bd712ca9e74a6d" exitCode=0 Sep 30 00:42:06 crc kubenswrapper[4809]: I0930 00:42:06.002595 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" event={"ID":"a96249d1-892d-4100-b032-c805fc0f9cbb","Type":"ContainerDied","Data":"242d74b4edafb24e91a8cf568650b46be4238da5dfceaef824bd712ca9e74a6d"} Sep 30 00:42:06 crc kubenswrapper[4809]: I0930 00:42:06.048848 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zvndc"] Sep 30 00:42:06 crc kubenswrapper[4809]: I0930 00:42:06.060200 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zvndc"] Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.547796 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.640749 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-inventory\") pod \"a96249d1-892d-4100-b032-c805fc0f9cbb\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.640859 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjk54\" (UniqueName: \"kubernetes.io/projected/a96249d1-892d-4100-b032-c805fc0f9cbb-kube-api-access-xjk54\") pod \"a96249d1-892d-4100-b032-c805fc0f9cbb\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.640945 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-ssh-key\") pod \"a96249d1-892d-4100-b032-c805fc0f9cbb\" (UID: \"a96249d1-892d-4100-b032-c805fc0f9cbb\") " Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.646430 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a96249d1-892d-4100-b032-c805fc0f9cbb-kube-api-access-xjk54" (OuterVolumeSpecName: "kube-api-access-xjk54") pod "a96249d1-892d-4100-b032-c805fc0f9cbb" (UID: "a96249d1-892d-4100-b032-c805fc0f9cbb"). InnerVolumeSpecName "kube-api-access-xjk54". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.675229 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-inventory" (OuterVolumeSpecName: "inventory") pod "a96249d1-892d-4100-b032-c805fc0f9cbb" (UID: "a96249d1-892d-4100-b032-c805fc0f9cbb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.685160 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a96249d1-892d-4100-b032-c805fc0f9cbb" (UID: "a96249d1-892d-4100-b032-c805fc0f9cbb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.703766 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2f48302-4af1-49a1-948a-086de787a0c4" path="/var/lib/kubelet/pods/e2f48302-4af1-49a1-948a-086de787a0c4/volumes" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.743753 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.743800 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjk54\" (UniqueName: \"kubernetes.io/projected/a96249d1-892d-4100-b032-c805fc0f9cbb-kube-api-access-xjk54\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:07 crc kubenswrapper[4809]: I0930 00:42:07.743819 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a96249d1-892d-4100-b032-c805fc0f9cbb-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.029457 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" event={"ID":"a96249d1-892d-4100-b032-c805fc0f9cbb","Type":"ContainerDied","Data":"dbd66928eb6c2a9e10e52a041a8e5c91f634a72870e3f7386b1c8c57e3d97835"} Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.029533 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbd66928eb6c2a9e10e52a041a8e5c91f634a72870e3f7386b1c8c57e3d97835" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.029556 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.150793 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp"] Sep 30 00:42:08 crc kubenswrapper[4809]: E0930 00:42:08.151313 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96249d1-892d-4100-b032-c805fc0f9cbb" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.151339 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96249d1-892d-4100-b032-c805fc0f9cbb" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.151599 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96249d1-892d-4100-b032-c805fc0f9cbb" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.152512 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.159569 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.159787 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.159820 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.159943 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.160323 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.160418 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.160434 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.160510 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.175407 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp"] Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.255822 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256213 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256304 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256334 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256408 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256436 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skgrt\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-kube-api-access-skgrt\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256466 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256497 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256554 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256633 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256682 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256710 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.256833 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358229 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358283 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358319 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") 
" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358390 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358450 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358495 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358800 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.358844 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.359420 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.359467 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skgrt\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-kube-api-access-skgrt\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.359507 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.359581 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.359695 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.365129 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.365287 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.365688 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.365728 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.365697 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: 
\"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.365959 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.366902 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.367731 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.368631 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.369026 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.372731 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.374443 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.378459 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-skgrt\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-kube-api-access-skgrt\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:08 crc kubenswrapper[4809]: I0930 00:42:08.471097 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:09 crc kubenswrapper[4809]: I0930 00:42:09.081702 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp"] Sep 30 00:42:10 crc kubenswrapper[4809]: I0930 00:42:10.056417 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" event={"ID":"1469fb07-9396-4b4f-afbd-d50ced38d02d","Type":"ContainerStarted","Data":"5399a8680c7e0b31c920f96e51a06f322489f6c55088fa6bb3677f89b44dcade"} Sep 30 00:42:10 crc kubenswrapper[4809]: I0930 00:42:10.056790 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" event={"ID":"1469fb07-9396-4b4f-afbd-d50ced38d02d","Type":"ContainerStarted","Data":"3d61b9f3117332aa5359a7915161524d695db93a5fbc9eb63db12928b53d4258"} Sep 30 00:42:10 crc kubenswrapper[4809]: I0930 00:42:10.079595 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" podStartSLOduration=1.622555702 podStartE2EDuration="2.079569695s" podCreationTimestamp="2025-09-30 00:42:08 +0000 UTC" firstStartedPulling="2025-09-30 00:42:09.085544446 +0000 UTC m=+1980.121793854" lastFinishedPulling="2025-09-30 00:42:09.542558419 +0000 UTC m=+1980.578807847" observedRunningTime="2025-09-30 00:42:10.076968135 +0000 UTC m=+1981.113217593" watchObservedRunningTime="2025-09-30 00:42:10.079569695 +0000 UTC m=+1981.115819123" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.520697 4809 scope.go:117] "RemoveContainer" containerID="242d241e4e0d2e4694ea04f4fb50d749755a51ed502d97869e24c0b2218f2fee" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.592481 4809 scope.go:117] "RemoveContainer" containerID="df86103d3a237b581adb6ffa0c0fce1ea64ea0b5216c4e436af204ec24ead13b" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.638092 4809 scope.go:117] "RemoveContainer" containerID="2f7f82b0982ed33c4181afb150f83f258a6e4fb448e08836f6b12bf5785d8984" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.659888 4809 scope.go:117] "RemoveContainer" containerID="63a0486be55448e34ddfc2f5eb50502e755d08247b8c58d5cf8eeab0da4b6a56" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.707520 4809 scope.go:117] "RemoveContainer" containerID="d11e618a1ce17a6be1fbebcb668b532c873bc5293ca1c2b63879b8044cf79faa" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.756231 4809 scope.go:117] "RemoveContainer" containerID="e08ad1e4e5b32ec57967d012dc1509136484554eafbcf91dce3380dbfa8d1bb3" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.801837 4809 scope.go:117] "RemoveContainer" containerID="eab730e58276147ed70d915f0c84500f12b6dca67d97368f0c7b708c7394f3c8" Sep 30 00:42:14 crc kubenswrapper[4809]: I0930 00:42:14.832255 4809 scope.go:117] "RemoveContainer" containerID="0bf2f4633116c7e689d365153c5d34e0c42e77184ad0f351762524f45a81588c" Sep 30 00:42:25 crc kubenswrapper[4809]: I0930 00:42:25.325316 4809 
patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:42:25 crc kubenswrapper[4809]: I0930 00:42:25.326000 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:42:50 crc kubenswrapper[4809]: I0930 00:42:50.053992 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-c2txl"] Sep 30 00:42:50 crc kubenswrapper[4809]: I0930 00:42:50.064030 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-c2txl"] Sep 30 00:42:50 crc kubenswrapper[4809]: I0930 00:42:50.545357 4809 generic.go:334] "Generic (PLEG): container finished" podID="1469fb07-9396-4b4f-afbd-d50ced38d02d" containerID="5399a8680c7e0b31c920f96e51a06f322489f6c55088fa6bb3677f89b44dcade" exitCode=0 Sep 30 00:42:50 crc kubenswrapper[4809]: I0930 00:42:50.545430 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" event={"ID":"1469fb07-9396-4b4f-afbd-d50ced38d02d","Type":"ContainerDied","Data":"5399a8680c7e0b31c920f96e51a06f322489f6c55088fa6bb3677f89b44dcade"} Sep 30 00:42:51 crc kubenswrapper[4809]: I0930 00:42:51.714911 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="009b9521-367b-4991-910e-b3ede9622095" path="/var/lib/kubelet/pods/009b9521-367b-4991-910e-b3ede9622095/volumes" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.049425 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.204703 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-bootstrap-combined-ca-bundle\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205049 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205095 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-combined-ca-bundle\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205146 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ssh-key\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205223 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-libvirt-combined-ca-bundle\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205251 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-power-monitoring-combined-ca-bundle\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205275 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-repo-setup-combined-ca-bundle\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205297 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205323 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-inventory\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc 
kubenswrapper[4809]: I0930 00:42:52.205439 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skgrt\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-kube-api-access-skgrt\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205499 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205533 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ovn-combined-ca-bundle\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.205552 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"1469fb07-9396-4b4f-afbd-d50ced38d02d\" (UID: \"1469fb07-9396-4b4f-afbd-d50ced38d02d\") " Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.211445 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.211721 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.212263 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-kube-api-access-skgrt" (OuterVolumeSpecName: "kube-api-access-skgrt") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "kube-api-access-skgrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.212343 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.213547 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.214448 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.215188 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.215202 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-power-monitoring-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-power-monitoring-combined-ca-bundle") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "telemetry-power-monitoring-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.216548 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.217308 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.221807 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). 
InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.243221 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.263219 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-inventory" (OuterVolumeSpecName: "inventory") pod "1469fb07-9396-4b4f-afbd-d50ced38d02d" (UID: "1469fb07-9396-4b4f-afbd-d50ced38d02d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307619 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307668 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skgrt\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-kube-api-access-skgrt\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307680 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307691 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307700 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307710 4809 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307719 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307728 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307737 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307744 4809 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307752 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-telemetry-power-monitoring-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307761 4809 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1469fb07-9396-4b4f-afbd-d50ced38d02d-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.307769 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1469fb07-9396-4b4f-afbd-d50ced38d02d-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.572310 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" event={"ID":"1469fb07-9396-4b4f-afbd-d50ced38d02d","Type":"ContainerDied","Data":"3d61b9f3117332aa5359a7915161524d695db93a5fbc9eb63db12928b53d4258"} Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.572429 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d61b9f3117332aa5359a7915161524d695db93a5fbc9eb63db12928b53d4258" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.572403 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.671416 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr"] Sep 30 00:42:52 crc kubenswrapper[4809]: E0930 00:42:52.671826 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1469fb07-9396-4b4f-afbd-d50ced38d02d" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.671844 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1469fb07-9396-4b4f-afbd-d50ced38d02d" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.672057 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1469fb07-9396-4b4f-afbd-d50ced38d02d" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.672877 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.675813 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.676063 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.676227 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.676406 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.677431 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.684711 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr"] Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.820214 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.820300 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.820400 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnfbt\" (UniqueName: \"kubernetes.io/projected/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-kube-api-access-nnfbt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.820566 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.820636 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.922092 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.922157 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.922279 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnfbt\" (UniqueName: \"kubernetes.io/projected/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-kube-api-access-nnfbt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.922366 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.922404 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.924597 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.926877 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.931174 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.932532 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.953052 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnfbt\" (UniqueName: \"kubernetes.io/projected/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-kube-api-access-nnfbt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ndvjr\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:52 crc kubenswrapper[4809]: I0930 00:42:52.991066 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:42:53 crc kubenswrapper[4809]: I0930 00:42:53.504369 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr"] Sep 30 00:42:53 crc kubenswrapper[4809]: I0930 00:42:53.583245 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" event={"ID":"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d","Type":"ContainerStarted","Data":"da79ada59f6f8fe916d575327ba654695060ded6b98659eca3768efda4b42020"} Sep 30 00:42:54 crc kubenswrapper[4809]: I0930 00:42:54.598846 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" event={"ID":"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d","Type":"ContainerStarted","Data":"a32a0882f766b52834b1c9850e71ae9b6dd9c6ba28183f75a6d6f3f5dafe2bcd"} Sep 30 00:42:54 crc kubenswrapper[4809]: I0930 00:42:54.622339 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" podStartSLOduration=2.066202922 podStartE2EDuration="2.622318564s" podCreationTimestamp="2025-09-30 00:42:52 +0000 UTC" firstStartedPulling="2025-09-30 00:42:53.506203533 +0000 UTC m=+2024.542452951" lastFinishedPulling="2025-09-30 00:42:54.062319165 +0000 UTC m=+2025.098568593" observedRunningTime="2025-09-30 00:42:54.619058066 +0000 UTC m=+2025.655307474" watchObservedRunningTime="2025-09-30 00:42:54.622318564 +0000 UTC m=+2025.658567972" Sep 30 00:42:55 crc kubenswrapper[4809]: I0930 00:42:55.325170 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:42:55 crc kubenswrapper[4809]: I0930 00:42:55.325228 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:43:15 crc kubenswrapper[4809]: I0930 00:43:15.021286 4809 scope.go:117] "RemoveContainer" containerID="bf4b137ba6dc126192b8984b62420580aaadd43805dc3cd4f86aba951d598406" Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.325392 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.325963 4809 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.326011 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.326823 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"13902e7754cc13427dac3722dc2f321035defa3ee2cdd26c1b7de958ff393f88"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.326875 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://13902e7754cc13427dac3722dc2f321035defa3ee2cdd26c1b7de958ff393f88" gracePeriod=600 Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.915341 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="13902e7754cc13427dac3722dc2f321035defa3ee2cdd26c1b7de958ff393f88" exitCode=0 Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.915430 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"13902e7754cc13427dac3722dc2f321035defa3ee2cdd26c1b7de958ff393f88"} Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.915859 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b"} Sep 30 00:43:25 crc kubenswrapper[4809]: I0930 00:43:25.915888 4809 scope.go:117] "RemoveContainer" containerID="1ea931bc1aa6f9ecd3943ef5200788ccf6880a9ed2be45c91d00d4a279bbf8ac" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.329369 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hhh9g"] Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.332557 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.343247 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hhh9g"] Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.399454 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-catalog-content\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.399625 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-utilities\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.399659 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7k8c\" (UniqueName: \"kubernetes.io/projected/048b5ca4-f20c-403a-97d1-38b015d129c5-kube-api-access-b7k8c\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.502295 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-utilities\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.502349 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7k8c\" (UniqueName: \"kubernetes.io/projected/048b5ca4-f20c-403a-97d1-38b015d129c5-kube-api-access-b7k8c\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.502424 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-catalog-content\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.502840 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-utilities\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.502946 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-catalog-content\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.523334 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-b7k8c\" (UniqueName: \"kubernetes.io/projected/048b5ca4-f20c-403a-97d1-38b015d129c5-kube-api-access-b7k8c\") pod \"certified-operators-hhh9g\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:35 crc kubenswrapper[4809]: I0930 00:43:35.679049 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:36 crc kubenswrapper[4809]: I0930 00:43:36.250372 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hhh9g"] Sep 30 00:43:37 crc kubenswrapper[4809]: I0930 00:43:37.027538 4809 generic.go:334] "Generic (PLEG): container finished" podID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerID="b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239" exitCode=0 Sep 30 00:43:37 crc kubenswrapper[4809]: I0930 00:43:37.027672 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerDied","Data":"b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239"} Sep 30 00:43:37 crc kubenswrapper[4809]: I0930 00:43:37.027933 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerStarted","Data":"c4f65497fa3197557e06f4eae0bde6f7c1d8d495f0e41df5c04a8c0a0b95e3dd"} Sep 30 00:43:38 crc kubenswrapper[4809]: I0930 00:43:38.043029 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerStarted","Data":"98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51"} Sep 30 00:43:40 crc kubenswrapper[4809]: I0930 00:43:40.066077 4809 generic.go:334] "Generic (PLEG): container finished" podID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerID="98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51" exitCode=0 Sep 30 00:43:40 crc kubenswrapper[4809]: I0930 00:43:40.066309 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerDied","Data":"98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51"} Sep 30 00:43:41 crc kubenswrapper[4809]: I0930 00:43:41.078644 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerStarted","Data":"d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52"} Sep 30 00:43:41 crc kubenswrapper[4809]: I0930 00:43:41.103407 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hhh9g" podStartSLOduration=2.546046833 podStartE2EDuration="6.10338521s" podCreationTimestamp="2025-09-30 00:43:35 +0000 UTC" firstStartedPulling="2025-09-30 00:43:37.030518943 +0000 UTC m=+2068.066768351" lastFinishedPulling="2025-09-30 00:43:40.58785731 +0000 UTC m=+2071.624106728" observedRunningTime="2025-09-30 00:43:41.093867662 +0000 UTC m=+2072.130117090" watchObservedRunningTime="2025-09-30 00:43:41.10338521 +0000 UTC m=+2072.139634618" Sep 30 00:43:45 crc kubenswrapper[4809]: I0930 00:43:45.679202 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:45 crc kubenswrapper[4809]: I0930 00:43:45.679803 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:45 crc kubenswrapper[4809]: I0930 00:43:45.738062 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:46 crc kubenswrapper[4809]: I0930 00:43:46.198088 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:46 crc kubenswrapper[4809]: I0930 00:43:46.260099 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hhh9g"] Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.156007 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hhh9g" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="registry-server" containerID="cri-o://d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52" gracePeriod=2 Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.663176 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.758427 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-catalog-content\") pod \"048b5ca4-f20c-403a-97d1-38b015d129c5\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.758583 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7k8c\" (UniqueName: \"kubernetes.io/projected/048b5ca4-f20c-403a-97d1-38b015d129c5-kube-api-access-b7k8c\") pod \"048b5ca4-f20c-403a-97d1-38b015d129c5\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.758745 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-utilities\") pod \"048b5ca4-f20c-403a-97d1-38b015d129c5\" (UID: \"048b5ca4-f20c-403a-97d1-38b015d129c5\") " Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.759725 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-utilities" (OuterVolumeSpecName: "utilities") pod "048b5ca4-f20c-403a-97d1-38b015d129c5" (UID: "048b5ca4-f20c-403a-97d1-38b015d129c5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.766341 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048b5ca4-f20c-403a-97d1-38b015d129c5-kube-api-access-b7k8c" (OuterVolumeSpecName: "kube-api-access-b7k8c") pod "048b5ca4-f20c-403a-97d1-38b015d129c5" (UID: "048b5ca4-f20c-403a-97d1-38b015d129c5"). InnerVolumeSpecName "kube-api-access-b7k8c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.810690 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "048b5ca4-f20c-403a-97d1-38b015d129c5" (UID: "048b5ca4-f20c-403a-97d1-38b015d129c5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.861846 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7k8c\" (UniqueName: \"kubernetes.io/projected/048b5ca4-f20c-403a-97d1-38b015d129c5-kube-api-access-b7k8c\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.861880 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:48 crc kubenswrapper[4809]: I0930 00:43:48.861890 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048b5ca4-f20c-403a-97d1-38b015d129c5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.166602 4809 generic.go:334] "Generic (PLEG): container finished" podID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerID="d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52" exitCode=0 Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.166705 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerDied","Data":"d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52"} Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.166756 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhh9g" event={"ID":"048b5ca4-f20c-403a-97d1-38b015d129c5","Type":"ContainerDied","Data":"c4f65497fa3197557e06f4eae0bde6f7c1d8d495f0e41df5c04a8c0a0b95e3dd"} Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.166774 4809 scope.go:117] "RemoveContainer" containerID="d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.166693 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hhh9g" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.189598 4809 scope.go:117] "RemoveContainer" containerID="98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.204691 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hhh9g"] Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.213614 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hhh9g"] Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.231683 4809 scope.go:117] "RemoveContainer" containerID="b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.295805 4809 scope.go:117] "RemoveContainer" containerID="d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52" Sep 30 00:43:49 crc kubenswrapper[4809]: E0930 00:43:49.296268 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52\": container with ID starting with d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52 not found: ID does not exist" containerID="d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.296320 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52"} err="failed to get container status \"d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52\": rpc error: code = NotFound desc = could not find container \"d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52\": container with ID starting with d17a2f5793a5fdaeb342cad5d405aeefef50034dd8f3e6cf09fcb019e8720b52 not found: ID does not exist" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.296358 4809 scope.go:117] "RemoveContainer" containerID="98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51" Sep 30 00:43:49 crc kubenswrapper[4809]: E0930 00:43:49.297016 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51\": container with ID starting with 98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51 not found: ID does not exist" containerID="98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.297055 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51"} err="failed to get container status \"98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51\": rpc error: code = NotFound desc = could not find container \"98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51\": container with ID starting with 98ff4e7283078d2f0cfaa2c6d5778498cd57b27e9ac55b569233588c4cd89b51 not found: ID does not exist" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.297082 4809 scope.go:117] "RemoveContainer" containerID="b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239" Sep 30 00:43:49 crc kubenswrapper[4809]: E0930 00:43:49.297518 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239\": container with ID starting with b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239 not found: ID does not exist" containerID="b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.297545 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239"} err="failed to get container status \"b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239\": rpc error: code = NotFound desc = could not find container \"b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239\": container with ID starting with b767b7e0371925c1f109eaaeeb9c0ec81a595cd99257d41c0883a5577dbe1239 not found: ID does not exist" Sep 30 00:43:49 crc kubenswrapper[4809]: I0930 00:43:49.701664 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" path="/var/lib/kubelet/pods/048b5ca4-f20c-403a-97d1-38b015d129c5/volumes" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.226262 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fmlnz"] Sep 30 00:44:04 crc kubenswrapper[4809]: E0930 00:44:04.227327 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="extract-content" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.227341 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="extract-content" Sep 30 00:44:04 crc kubenswrapper[4809]: E0930 00:44:04.227362 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="registry-server" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.227389 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="registry-server" Sep 30 00:44:04 crc kubenswrapper[4809]: E0930 00:44:04.227451 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="extract-utilities" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.227459 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="extract-utilities" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.227884 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="048b5ca4-f20c-403a-97d1-38b015d129c5" containerName="registry-server" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.229519 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.260621 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fmlnz"] Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.320631 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-utilities\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.320785 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr686\" (UniqueName: \"kubernetes.io/projected/65f83828-2cfd-4dc0-8d15-fc0649df37c3-kube-api-access-wr686\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.320825 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-catalog-content\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.423051 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-utilities\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.423125 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr686\" (UniqueName: \"kubernetes.io/projected/65f83828-2cfd-4dc0-8d15-fc0649df37c3-kube-api-access-wr686\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.423150 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-catalog-content\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.423603 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-utilities\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.423674 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-catalog-content\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.447554 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wr686\" (UniqueName: \"kubernetes.io/projected/65f83828-2cfd-4dc0-8d15-fc0649df37c3-kube-api-access-wr686\") pod \"redhat-marketplace-fmlnz\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:04 crc kubenswrapper[4809]: I0930 00:44:04.553168 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:05 crc kubenswrapper[4809]: I0930 00:44:05.070501 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fmlnz"] Sep 30 00:44:05 crc kubenswrapper[4809]: I0930 00:44:05.357074 4809 generic.go:334] "Generic (PLEG): container finished" podID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerID="ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df" exitCode=0 Sep 30 00:44:05 crc kubenswrapper[4809]: I0930 00:44:05.357114 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerDied","Data":"ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df"} Sep 30 00:44:05 crc kubenswrapper[4809]: I0930 00:44:05.357388 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerStarted","Data":"90e4b51842876aace70b42401a263b0b5fe15e2b8dfbd0dc8edc6eec4ddcba44"} Sep 30 00:44:06 crc kubenswrapper[4809]: I0930 00:44:06.371424 4809 generic.go:334] "Generic (PLEG): container finished" podID="4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" containerID="a32a0882f766b52834b1c9850e71ae9b6dd9c6ba28183f75a6d6f3f5dafe2bcd" exitCode=0 Sep 30 00:44:06 crc kubenswrapper[4809]: I0930 00:44:06.371572 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" event={"ID":"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d","Type":"ContainerDied","Data":"a32a0882f766b52834b1c9850e71ae9b6dd9c6ba28183f75a6d6f3f5dafe2bcd"} Sep 30 00:44:06 crc kubenswrapper[4809]: I0930 00:44:06.376194 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerStarted","Data":"a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88"} Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.389662 4809 generic.go:334] "Generic (PLEG): container finished" podID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerID="a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88" exitCode=0 Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.389717 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerDied","Data":"a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88"} Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.803247 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.902421 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnfbt\" (UniqueName: \"kubernetes.io/projected/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-kube-api-access-nnfbt\") pod \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.902928 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ssh-key\") pod \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.903036 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovn-combined-ca-bundle\") pod \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.903112 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-inventory\") pod \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.903142 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovncontroller-config-0\") pod \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\" (UID: \"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d\") " Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.949812 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" (UID: "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.973280 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-kube-api-access-nnfbt" (OuterVolumeSpecName: "kube-api-access-nnfbt") pod "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" (UID: "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d"). InnerVolumeSpecName "kube-api-access-nnfbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.977001 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" (UID: "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:44:07 crc kubenswrapper[4809]: I0930 00:44:07.982365 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-inventory" (OuterVolumeSpecName: "inventory") pod "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" (UID: "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.003855 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" (UID: "4e6d97aa-36d0-45f9-ae52-b14487f4cb5d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.009190 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.009235 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.009250 4809 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.009260 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnfbt\" (UniqueName: \"kubernetes.io/projected/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-kube-api-access-nnfbt\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.009271 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.402777 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerStarted","Data":"0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192"} Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.405484 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" event={"ID":"4e6d97aa-36d0-45f9-ae52-b14487f4cb5d","Type":"ContainerDied","Data":"da79ada59f6f8fe916d575327ba654695060ded6b98659eca3768efda4b42020"} Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.405530 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da79ada59f6f8fe916d575327ba654695060ded6b98659eca3768efda4b42020" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.405614 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.441625 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fmlnz" podStartSLOduration=1.8831960909999998 podStartE2EDuration="4.441608639s" podCreationTimestamp="2025-09-30 00:44:04 +0000 UTC" firstStartedPulling="2025-09-30 00:44:05.358858491 +0000 UTC m=+2096.395107899" lastFinishedPulling="2025-09-30 00:44:07.917271039 +0000 UTC m=+2098.953520447" observedRunningTime="2025-09-30 00:44:08.438091384 +0000 UTC m=+2099.474340792" watchObservedRunningTime="2025-09-30 00:44:08.441608639 +0000 UTC m=+2099.477858047" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.501585 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx"] Sep 30 00:44:08 crc kubenswrapper[4809]: E0930 00:44:08.502129 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.502154 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.502460 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.503453 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.505955 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.507048 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.507272 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.507500 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.508133 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.560330 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx"] Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.621731 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.621943 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k47l\" (UniqueName: 
\"kubernetes.io/projected/8cec48e7-b769-4dc4-9e71-14a237f8aab8-kube-api-access-5k47l\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.621970 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.622052 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.622075 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.724076 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.724152 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.724188 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.724322 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k47l\" (UniqueName: \"kubernetes.io/projected/8cec48e7-b769-4dc4-9e71-14a237f8aab8-kube-api-access-5k47l\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.724343 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.728974 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.728977 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.735170 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.739877 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.740741 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k47l\" (UniqueName: \"kubernetes.io/projected/8cec48e7-b769-4dc4-9e71-14a237f8aab8-kube-api-access-5k47l\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:08 crc kubenswrapper[4809]: I0930 00:44:08.829248 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:44:09 crc kubenswrapper[4809]: I0930 00:44:09.389257 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx"] Sep 30 00:44:09 crc kubenswrapper[4809]: I0930 00:44:09.417349 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" event={"ID":"8cec48e7-b769-4dc4-9e71-14a237f8aab8","Type":"ContainerStarted","Data":"d904a6a55bc58c694629f9989f83034e36718fc571654eab8ac24e4b93219cbf"} Sep 30 00:44:10 crc kubenswrapper[4809]: I0930 00:44:10.040007 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:44:10 crc kubenswrapper[4809]: I0930 00:44:10.428880 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" event={"ID":"8cec48e7-b769-4dc4-9e71-14a237f8aab8","Type":"ContainerStarted","Data":"b141a3642a92ae95db9d4a6e493fe12dbd4a13a55a52fc01d81cf07fe7bf6f06"} Sep 30 00:44:10 crc kubenswrapper[4809]: I0930 00:44:10.456163 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" podStartSLOduration=1.802198525 podStartE2EDuration="2.456144867s" podCreationTimestamp="2025-09-30 00:44:08 +0000 UTC" firstStartedPulling="2025-09-30 00:44:09.383196038 +0000 UTC m=+2100.419445446" lastFinishedPulling="2025-09-30 00:44:10.03714238 +0000 UTC m=+2101.073391788" observedRunningTime="2025-09-30 00:44:10.448112089 +0000 UTC m=+2101.484361547" watchObservedRunningTime="2025-09-30 00:44:10.456144867 +0000 UTC m=+2101.492394275" Sep 30 00:44:14 crc kubenswrapper[4809]: I0930 00:44:14.045628 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-5gbcd"] Sep 30 00:44:14 crc kubenswrapper[4809]: I0930 00:44:14.054799 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-5gbcd"] Sep 30 00:44:14 crc kubenswrapper[4809]: I0930 00:44:14.553329 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:14 crc kubenswrapper[4809]: I0930 00:44:14.553396 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:14 crc kubenswrapper[4809]: I0930 00:44:14.642417 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:15 crc kubenswrapper[4809]: I0930 00:44:15.533582 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:15 crc kubenswrapper[4809]: I0930 00:44:15.586683 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fmlnz"] Sep 30 00:44:15 crc kubenswrapper[4809]: I0930 00:44:15.706425 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feb60974-e32b-4f1e-bcd2-2647c3dc05eb" path="/var/lib/kubelet/pods/feb60974-e32b-4f1e-bcd2-2647c3dc05eb/volumes" Sep 30 00:44:17 crc kubenswrapper[4809]: I0930 00:44:17.510987 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fmlnz" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="registry-server" 
containerID="cri-o://0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192" gracePeriod=2 Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.000854 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.135845 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr686\" (UniqueName: \"kubernetes.io/projected/65f83828-2cfd-4dc0-8d15-fc0649df37c3-kube-api-access-wr686\") pod \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.136059 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-utilities\") pod \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.136092 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-catalog-content\") pod \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\" (UID: \"65f83828-2cfd-4dc0-8d15-fc0649df37c3\") " Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.136868 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-utilities" (OuterVolumeSpecName: "utilities") pod "65f83828-2cfd-4dc0-8d15-fc0649df37c3" (UID: "65f83828-2cfd-4dc0-8d15-fc0649df37c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.141993 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f83828-2cfd-4dc0-8d15-fc0649df37c3-kube-api-access-wr686" (OuterVolumeSpecName: "kube-api-access-wr686") pod "65f83828-2cfd-4dc0-8d15-fc0649df37c3" (UID: "65f83828-2cfd-4dc0-8d15-fc0649df37c3"). InnerVolumeSpecName "kube-api-access-wr686". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.149132 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65f83828-2cfd-4dc0-8d15-fc0649df37c3" (UID: "65f83828-2cfd-4dc0-8d15-fc0649df37c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.237860 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.237889 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f83828-2cfd-4dc0-8d15-fc0649df37c3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.237900 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr686\" (UniqueName: \"kubernetes.io/projected/65f83828-2cfd-4dc0-8d15-fc0649df37c3-kube-api-access-wr686\") on node \"crc\" DevicePath \"\"" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.521805 4809 generic.go:334] "Generic (PLEG): container finished" podID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerID="0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192" exitCode=0 Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.521849 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerDied","Data":"0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192"} Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.521900 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fmlnz" event={"ID":"65f83828-2cfd-4dc0-8d15-fc0649df37c3","Type":"ContainerDied","Data":"90e4b51842876aace70b42401a263b0b5fe15e2b8dfbd0dc8edc6eec4ddcba44"} Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.521916 4809 scope.go:117] "RemoveContainer" containerID="0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.523351 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fmlnz" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.548183 4809 scope.go:117] "RemoveContainer" containerID="a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.565535 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fmlnz"] Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.577215 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fmlnz"] Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.583041 4809 scope.go:117] "RemoveContainer" containerID="ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.632862 4809 scope.go:117] "RemoveContainer" containerID="0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192" Sep 30 00:44:18 crc kubenswrapper[4809]: E0930 00:44:18.633299 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192\": container with ID starting with 0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192 not found: ID does not exist" containerID="0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.633332 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192"} err="failed to get container status \"0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192\": rpc error: code = NotFound desc = could not find container \"0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192\": container with ID starting with 0a2cbf30e5944be0fded4f63c459adf3c3ce02c8223166a73639518750dc1192 not found: ID does not exist" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.633361 4809 scope.go:117] "RemoveContainer" containerID="a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88" Sep 30 00:44:18 crc kubenswrapper[4809]: E0930 00:44:18.633863 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88\": container with ID starting with a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88 not found: ID does not exist" containerID="a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.633912 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88"} err="failed to get container status \"a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88\": rpc error: code = NotFound desc = could not find container \"a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88\": container with ID starting with a49b998ae241d06a7c286b1d03c744f4d5f781c8b6c51fd03ac31568bf459a88 not found: ID does not exist" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.633942 4809 scope.go:117] "RemoveContainer" containerID="ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df" Sep 30 00:44:18 crc kubenswrapper[4809]: E0930 00:44:18.634257 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df\": container with ID starting with ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df not found: ID does not exist" containerID="ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df" Sep 30 00:44:18 crc kubenswrapper[4809]: I0930 00:44:18.634292 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df"} err="failed to get container status \"ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df\": rpc error: code = NotFound desc = could not find container \"ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df\": container with ID starting with ca0691b2c17170800f43b59f5a914f2dbbb67eb0f3553ad9dd6fcec6c8ab05df not found: ID does not exist" Sep 30 00:44:19 crc kubenswrapper[4809]: I0930 00:44:19.708425 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" path="/var/lib/kubelet/pods/65f83828-2cfd-4dc0-8d15-fc0649df37c3/volumes" Sep 30 00:44:51 crc kubenswrapper[4809]: I0930 00:44:51.054772 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-5gd22"] Sep 30 00:44:51 crc kubenswrapper[4809]: I0930 00:44:51.068998 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-5gd22"] Sep 30 00:44:51 crc kubenswrapper[4809]: I0930 00:44:51.717504 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9759b39-ef34-4f3a-bba2-bd1421fee4c6" path="/var/lib/kubelet/pods/e9759b39-ef34-4f3a-bba2-bd1421fee4c6/volumes" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.154448 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8"] Sep 30 00:45:00 crc kubenswrapper[4809]: E0930 00:45:00.155517 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="extract-content" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.155535 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="extract-content" Sep 30 00:45:00 crc kubenswrapper[4809]: E0930 00:45:00.155584 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="registry-server" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.155592 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="registry-server" Sep 30 00:45:00 crc kubenswrapper[4809]: E0930 00:45:00.155619 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="extract-utilities" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.155627 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="extract-utilities" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.155878 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f83828-2cfd-4dc0-8d15-fc0649df37c3" containerName="registry-server" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.156800 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.159500 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.161128 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.191708 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8"] Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.334248 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/09d2f33b-d671-4c21-b1bc-babe71939120-secret-volume\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.334334 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s44lg\" (UniqueName: \"kubernetes.io/projected/09d2f33b-d671-4c21-b1bc-babe71939120-kube-api-access-s44lg\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.334834 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/09d2f33b-d671-4c21-b1bc-babe71939120-config-volume\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.437064 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/09d2f33b-d671-4c21-b1bc-babe71939120-config-volume\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.437242 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/09d2f33b-d671-4c21-b1bc-babe71939120-secret-volume\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.437401 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s44lg\" (UniqueName: \"kubernetes.io/projected/09d2f33b-d671-4c21-b1bc-babe71939120-kube-api-access-s44lg\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.438865 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/09d2f33b-d671-4c21-b1bc-babe71939120-config-volume\") pod 
\"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.448559 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/09d2f33b-d671-4c21-b1bc-babe71939120-secret-volume\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.460756 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s44lg\" (UniqueName: \"kubernetes.io/projected/09d2f33b-d671-4c21-b1bc-babe71939120-kube-api-access-s44lg\") pod \"collect-profiles-29319885-ff7p8\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.501436 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:00 crc kubenswrapper[4809]: I0930 00:45:00.982126 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8"] Sep 30 00:45:01 crc kubenswrapper[4809]: I0930 00:45:01.957785 4809 generic.go:334] "Generic (PLEG): container finished" podID="09d2f33b-d671-4c21-b1bc-babe71939120" containerID="efb7bd62fca5b0e3f5f87719df74eb0b693b3bdd9decaae163b0fe3baba5e94a" exitCode=0 Sep 30 00:45:01 crc kubenswrapper[4809]: I0930 00:45:01.957857 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" event={"ID":"09d2f33b-d671-4c21-b1bc-babe71939120","Type":"ContainerDied","Data":"efb7bd62fca5b0e3f5f87719df74eb0b693b3bdd9decaae163b0fe3baba5e94a"} Sep 30 00:45:01 crc kubenswrapper[4809]: I0930 00:45:01.958087 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" event={"ID":"09d2f33b-d671-4c21-b1bc-babe71939120","Type":"ContainerStarted","Data":"c3f90ea54e112002c306fc88abc7744e766af7068cf5bfa4e5dddd6e1636483c"} Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.385649 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.497979 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/09d2f33b-d671-4c21-b1bc-babe71939120-config-volume\") pod \"09d2f33b-d671-4c21-b1bc-babe71939120\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.498187 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s44lg\" (UniqueName: \"kubernetes.io/projected/09d2f33b-d671-4c21-b1bc-babe71939120-kube-api-access-s44lg\") pod \"09d2f33b-d671-4c21-b1bc-babe71939120\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.498229 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/09d2f33b-d671-4c21-b1bc-babe71939120-secret-volume\") pod \"09d2f33b-d671-4c21-b1bc-babe71939120\" (UID: \"09d2f33b-d671-4c21-b1bc-babe71939120\") " Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.498845 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09d2f33b-d671-4c21-b1bc-babe71939120-config-volume" (OuterVolumeSpecName: "config-volume") pod "09d2f33b-d671-4c21-b1bc-babe71939120" (UID: "09d2f33b-d671-4c21-b1bc-babe71939120"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.504292 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d2f33b-d671-4c21-b1bc-babe71939120-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "09d2f33b-d671-4c21-b1bc-babe71939120" (UID: "09d2f33b-d671-4c21-b1bc-babe71939120"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.505869 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09d2f33b-d671-4c21-b1bc-babe71939120-kube-api-access-s44lg" (OuterVolumeSpecName: "kube-api-access-s44lg") pod "09d2f33b-d671-4c21-b1bc-babe71939120" (UID: "09d2f33b-d671-4c21-b1bc-babe71939120"). InnerVolumeSpecName "kube-api-access-s44lg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.600114 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s44lg\" (UniqueName: \"kubernetes.io/projected/09d2f33b-d671-4c21-b1bc-babe71939120-kube-api-access-s44lg\") on node \"crc\" DevicePath \"\"" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.600144 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/09d2f33b-d671-4c21-b1bc-babe71939120-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.600154 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/09d2f33b-d671-4c21-b1bc-babe71939120-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.982601 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" event={"ID":"09d2f33b-d671-4c21-b1bc-babe71939120","Type":"ContainerDied","Data":"c3f90ea54e112002c306fc88abc7744e766af7068cf5bfa4e5dddd6e1636483c"} Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.982661 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3f90ea54e112002c306fc88abc7744e766af7068cf5bfa4e5dddd6e1636483c" Sep 30 00:45:03 crc kubenswrapper[4809]: I0930 00:45:03.982736 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8" Sep 30 00:45:04 crc kubenswrapper[4809]: I0930 00:45:04.470690 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v"] Sep 30 00:45:04 crc kubenswrapper[4809]: I0930 00:45:04.479909 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-mxs7v"] Sep 30 00:45:05 crc kubenswrapper[4809]: I0930 00:45:05.707315 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="960a6573-c6e4-49fe-9aff-0c9b43435215" path="/var/lib/kubelet/pods/960a6573-c6e4-49fe-9aff-0c9b43435215/volumes" Sep 30 00:45:15 crc kubenswrapper[4809]: I0930 00:45:15.166102 4809 scope.go:117] "RemoveContainer" containerID="8758d9baefb97fe777fe1c85b8a5e1569b491929745518fe92cb5ac1e892ea06" Sep 30 00:45:15 crc kubenswrapper[4809]: I0930 00:45:15.198676 4809 scope.go:117] "RemoveContainer" containerID="131483e9532964c02cdb7ff8df9f7c01820fb295dc92898e51e714d50a0abbbf" Sep 30 00:45:15 crc kubenswrapper[4809]: I0930 00:45:15.296202 4809 scope.go:117] "RemoveContainer" containerID="f054046a6250eb8ce1aa2af86898bf23a93b8382e08eeb551bc027c2d9b5c345" Sep 30 00:45:25 crc kubenswrapper[4809]: I0930 00:45:25.325436 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:45:25 crc kubenswrapper[4809]: I0930 00:45:25.325950 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Sep 30 00:45:55 crc kubenswrapper[4809]: I0930 00:45:55.325166 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:45:55 crc kubenswrapper[4809]: I0930 00:45:55.325921 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.641996 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2tkjj"] Sep 30 00:46:16 crc kubenswrapper[4809]: E0930 00:46:16.642827 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d2f33b-d671-4c21-b1bc-babe71939120" containerName="collect-profiles" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.642839 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d2f33b-d671-4c21-b1bc-babe71939120" containerName="collect-profiles" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.643024 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d2f33b-d671-4c21-b1bc-babe71939120" containerName="collect-profiles" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.646311 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.688151 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tkjj"] Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.828369 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-utilities\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.828740 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94q46\" (UniqueName: \"kubernetes.io/projected/9ce4930e-fae6-4308-b1ce-239f37214c19-kube-api-access-94q46\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.828818 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-catalog-content\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.931844 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-utilities\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 
crc kubenswrapper[4809]: I0930 00:46:16.931950 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94q46\" (UniqueName: \"kubernetes.io/projected/9ce4930e-fae6-4308-b1ce-239f37214c19-kube-api-access-94q46\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.932005 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-catalog-content\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.932616 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-utilities\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.932675 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-catalog-content\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:16 crc kubenswrapper[4809]: I0930 00:46:16.955165 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94q46\" (UniqueName: \"kubernetes.io/projected/9ce4930e-fae6-4308-b1ce-239f37214c19-kube-api-access-94q46\") pod \"community-operators-2tkjj\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:17 crc kubenswrapper[4809]: I0930 00:46:17.014429 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:17 crc kubenswrapper[4809]: I0930 00:46:17.612756 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tkjj"] Sep 30 00:46:17 crc kubenswrapper[4809]: I0930 00:46:17.781488 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerStarted","Data":"1f6c5642fe414e76d53a1e619161a6f89d5ab9ccb11ff6e02403d058247f0352"} Sep 30 00:46:18 crc kubenswrapper[4809]: I0930 00:46:18.791618 4809 generic.go:334] "Generic (PLEG): container finished" podID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerID="bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90" exitCode=0 Sep 30 00:46:18 crc kubenswrapper[4809]: I0930 00:46:18.791680 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerDied","Data":"bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90"} Sep 30 00:46:18 crc kubenswrapper[4809]: I0930 00:46:18.794291 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:46:19 crc kubenswrapper[4809]: I0930 00:46:19.804578 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerStarted","Data":"c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138"} Sep 30 00:46:21 crc kubenswrapper[4809]: I0930 00:46:21.832111 4809 generic.go:334] "Generic (PLEG): container finished" podID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerID="c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138" exitCode=0 Sep 30 00:46:21 crc kubenswrapper[4809]: I0930 00:46:21.832225 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerDied","Data":"c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138"} Sep 30 00:46:22 crc kubenswrapper[4809]: I0930 00:46:22.852880 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerStarted","Data":"383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30"} Sep 30 00:46:22 crc kubenswrapper[4809]: I0930 00:46:22.893200 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2tkjj" podStartSLOduration=3.396894652 podStartE2EDuration="6.89316371s" podCreationTimestamp="2025-09-30 00:46:16 +0000 UTC" firstStartedPulling="2025-09-30 00:46:18.793972607 +0000 UTC m=+2229.830222025" lastFinishedPulling="2025-09-30 00:46:22.290241635 +0000 UTC m=+2233.326491083" observedRunningTime="2025-09-30 00:46:22.876065555 +0000 UTC m=+2233.912314983" watchObservedRunningTime="2025-09-30 00:46:22.89316371 +0000 UTC m=+2233.929413178" Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.325178 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:46:25 crc 
kubenswrapper[4809]: I0930 00:46:25.325751 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.325831 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.327218 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.327390 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" gracePeriod=600 Sep 30 00:46:25 crc kubenswrapper[4809]: E0930 00:46:25.459528 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.905858 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" exitCode=0 Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.905987 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b"} Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.906110 4809 scope.go:117] "RemoveContainer" containerID="13902e7754cc13427dac3722dc2f321035defa3ee2cdd26c1b7de958ff393f88" Sep 30 00:46:25 crc kubenswrapper[4809]: I0930 00:46:25.906857 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:46:25 crc kubenswrapper[4809]: E0930 00:46:25.907183 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:46:27 crc kubenswrapper[4809]: I0930 00:46:27.015733 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:27 crc 
kubenswrapper[4809]: I0930 00:46:27.016324 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:27 crc kubenswrapper[4809]: I0930 00:46:27.092411 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:28 crc kubenswrapper[4809]: I0930 00:46:28.013907 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:28 crc kubenswrapper[4809]: I0930 00:46:28.069563 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tkjj"] Sep 30 00:46:29 crc kubenswrapper[4809]: I0930 00:46:29.972004 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2tkjj" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="registry-server" containerID="cri-o://383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30" gracePeriod=2 Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.523400 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.647859 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-utilities\") pod \"9ce4930e-fae6-4308-b1ce-239f37214c19\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.647926 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94q46\" (UniqueName: \"kubernetes.io/projected/9ce4930e-fae6-4308-b1ce-239f37214c19-kube-api-access-94q46\") pod \"9ce4930e-fae6-4308-b1ce-239f37214c19\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.647973 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-catalog-content\") pod \"9ce4930e-fae6-4308-b1ce-239f37214c19\" (UID: \"9ce4930e-fae6-4308-b1ce-239f37214c19\") " Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.648923 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-utilities" (OuterVolumeSpecName: "utilities") pod "9ce4930e-fae6-4308-b1ce-239f37214c19" (UID: "9ce4930e-fae6-4308-b1ce-239f37214c19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.650362 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.653470 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ce4930e-fae6-4308-b1ce-239f37214c19-kube-api-access-94q46" (OuterVolumeSpecName: "kube-api-access-94q46") pod "9ce4930e-fae6-4308-b1ce-239f37214c19" (UID: "9ce4930e-fae6-4308-b1ce-239f37214c19"). InnerVolumeSpecName "kube-api-access-94q46". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.706394 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ce4930e-fae6-4308-b1ce-239f37214c19" (UID: "9ce4930e-fae6-4308-b1ce-239f37214c19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.753815 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94q46\" (UniqueName: \"kubernetes.io/projected/9ce4930e-fae6-4308-b1ce-239f37214c19-kube-api-access-94q46\") on node \"crc\" DevicePath \"\"" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.753864 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ce4930e-fae6-4308-b1ce-239f37214c19-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.983544 4809 generic.go:334] "Generic (PLEG): container finished" podID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerID="383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30" exitCode=0 Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.983605 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerDied","Data":"383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30"} Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.983675 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tkjj" event={"ID":"9ce4930e-fae6-4308-b1ce-239f37214c19","Type":"ContainerDied","Data":"1f6c5642fe414e76d53a1e619161a6f89d5ab9ccb11ff6e02403d058247f0352"} Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.983705 4809 scope.go:117] "RemoveContainer" containerID="383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30" Sep 30 00:46:30 crc kubenswrapper[4809]: I0930 00:46:30.984888 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2tkjj" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.012080 4809 scope.go:117] "RemoveContainer" containerID="c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.039297 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tkjj"] Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.053377 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2tkjj"] Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.058774 4809 scope.go:117] "RemoveContainer" containerID="bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.088066 4809 scope.go:117] "RemoveContainer" containerID="383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30" Sep 30 00:46:31 crc kubenswrapper[4809]: E0930 00:46:31.088546 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30\": container with ID starting with 383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30 not found: ID does not exist" containerID="383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.088751 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30"} err="failed to get container status \"383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30\": rpc error: code = NotFound desc = could not find container \"383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30\": container with ID starting with 383f4c2a416ca3877824c9ecd1983c276c595b7aebf8e33aff48ecf036abbf30 not found: ID does not exist" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.088867 4809 scope.go:117] "RemoveContainer" containerID="c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138" Sep 30 00:46:31 crc kubenswrapper[4809]: E0930 00:46:31.089265 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138\": container with ID starting with c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138 not found: ID does not exist" containerID="c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.089384 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138"} err="failed to get container status \"c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138\": rpc error: code = NotFound desc = could not find container \"c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138\": container with ID starting with c64ffe6468adabaf902078e988ced6c4df780a52c9369c4ecfe7fec1cfe3f138 not found: ID does not exist" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.089492 4809 scope.go:117] "RemoveContainer" containerID="bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90" Sep 30 00:46:31 crc kubenswrapper[4809]: E0930 00:46:31.091046 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90\": container with ID starting with bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90 not found: ID does not exist" containerID="bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.091069 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90"} err="failed to get container status \"bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90\": rpc error: code = NotFound desc = could not find container \"bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90\": container with ID starting with bfe637495677def2274b8ffb52afff729ced5c95abe0f9da8fbd839ce0b93a90 not found: ID does not exist" Sep 30 00:46:31 crc kubenswrapper[4809]: I0930 00:46:31.706541 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" path="/var/lib/kubelet/pods/9ce4930e-fae6-4308-b1ce-239f37214c19/volumes" Sep 30 00:46:39 crc kubenswrapper[4809]: I0930 00:46:39.712056 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:46:39 crc kubenswrapper[4809]: E0930 00:46:39.713482 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:46:50 crc kubenswrapper[4809]: I0930 00:46:50.691739 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:46:50 crc kubenswrapper[4809]: E0930 00:46:50.693386 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:47:04 crc kubenswrapper[4809]: I0930 00:47:04.692485 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:47:04 crc kubenswrapper[4809]: E0930 00:47:04.693920 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.144513 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7n6c2"] Sep 30 00:47:06 crc kubenswrapper[4809]: E0930 00:47:06.145205 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" 
containerName="extract-content" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.145221 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="extract-content" Sep 30 00:47:06 crc kubenswrapper[4809]: E0930 00:47:06.145255 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="extract-utilities" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.145264 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="extract-utilities" Sep 30 00:47:06 crc kubenswrapper[4809]: E0930 00:47:06.145290 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="registry-server" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.145299 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="registry-server" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.145585 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ce4930e-fae6-4308-b1ce-239f37214c19" containerName="registry-server" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.147588 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.156279 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7n6c2"] Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.194950 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-utilities\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.195305 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thx66\" (UniqueName: \"kubernetes.io/projected/91e7cf07-c909-400b-b234-8826c1181d9c-kube-api-access-thx66\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.195497 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-catalog-content\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.297798 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-utilities\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.298237 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thx66\" (UniqueName: \"kubernetes.io/projected/91e7cf07-c909-400b-b234-8826c1181d9c-kube-api-access-thx66\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " 
pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.298347 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-catalog-content\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.299451 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-catalog-content\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.299796 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-utilities\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.323557 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thx66\" (UniqueName: \"kubernetes.io/projected/91e7cf07-c909-400b-b234-8826c1181d9c-kube-api-access-thx66\") pod \"redhat-operators-7n6c2\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.471333 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:06 crc kubenswrapper[4809]: I0930 00:47:06.938929 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7n6c2"] Sep 30 00:47:07 crc kubenswrapper[4809]: I0930 00:47:07.433230 4809 generic.go:334] "Generic (PLEG): container finished" podID="91e7cf07-c909-400b-b234-8826c1181d9c" containerID="92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9" exitCode=0 Sep 30 00:47:07 crc kubenswrapper[4809]: I0930 00:47:07.433287 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerDied","Data":"92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9"} Sep 30 00:47:07 crc kubenswrapper[4809]: I0930 00:47:07.433523 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerStarted","Data":"f05a66ca0278e6fb6b1aa01ce24d4f45ea7b7c1021e18f8ce0ffec7abe01bf83"} Sep 30 00:47:09 crc kubenswrapper[4809]: I0930 00:47:09.460127 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerStarted","Data":"017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629"} Sep 30 00:47:12 crc kubenswrapper[4809]: I0930 00:47:12.495135 4809 generic.go:334] "Generic (PLEG): container finished" podID="91e7cf07-c909-400b-b234-8826c1181d9c" containerID="017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629" exitCode=0 Sep 30 00:47:12 crc kubenswrapper[4809]: I0930 00:47:12.495225 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerDied","Data":"017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629"} Sep 30 00:47:13 crc kubenswrapper[4809]: I0930 00:47:13.512196 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerStarted","Data":"b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd"} Sep 30 00:47:15 crc kubenswrapper[4809]: I0930 00:47:15.691449 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:47:15 crc kubenswrapper[4809]: E0930 00:47:15.692142 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:47:16 crc kubenswrapper[4809]: I0930 00:47:16.471919 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:16 crc kubenswrapper[4809]: I0930 00:47:16.472021 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:17 crc kubenswrapper[4809]: I0930 00:47:17.527228 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7n6c2" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="registry-server" probeResult="failure" output=< Sep 30 00:47:17 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 00:47:17 crc kubenswrapper[4809]: > Sep 30 00:47:26 crc kubenswrapper[4809]: I0930 00:47:26.564229 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:26 crc kubenswrapper[4809]: I0930 00:47:26.604259 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7n6c2" podStartSLOduration=15.049674849 podStartE2EDuration="20.604229057s" podCreationTimestamp="2025-09-30 00:47:06 +0000 UTC" firstStartedPulling="2025-09-30 00:47:07.435254244 +0000 UTC m=+2278.471503652" lastFinishedPulling="2025-09-30 00:47:12.989808442 +0000 UTC m=+2284.026057860" observedRunningTime="2025-09-30 00:47:13.544044386 +0000 UTC m=+2284.580293814" watchObservedRunningTime="2025-09-30 00:47:26.604229057 +0000 UTC m=+2297.640478505" Sep 30 00:47:26 crc kubenswrapper[4809]: I0930 00:47:26.633799 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:26 crc kubenswrapper[4809]: I0930 00:47:26.804259 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7n6c2"] Sep 30 00:47:27 crc kubenswrapper[4809]: I0930 00:47:27.653598 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7n6c2" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="registry-server" containerID="cri-o://b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd" gracePeriod=2 Sep 30 00:47:27 crc 
kubenswrapper[4809]: I0930 00:47:27.691473 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:47:27 crc kubenswrapper[4809]: E0930 00:47:27.692026 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.151977 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.229394 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-utilities\") pod \"91e7cf07-c909-400b-b234-8826c1181d9c\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.230312 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-utilities" (OuterVolumeSpecName: "utilities") pod "91e7cf07-c909-400b-b234-8826c1181d9c" (UID: "91e7cf07-c909-400b-b234-8826c1181d9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.330635 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thx66\" (UniqueName: \"kubernetes.io/projected/91e7cf07-c909-400b-b234-8826c1181d9c-kube-api-access-thx66\") pod \"91e7cf07-c909-400b-b234-8826c1181d9c\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.330733 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-catalog-content\") pod \"91e7cf07-c909-400b-b234-8826c1181d9c\" (UID: \"91e7cf07-c909-400b-b234-8826c1181d9c\") " Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.331145 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.336743 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e7cf07-c909-400b-b234-8826c1181d9c-kube-api-access-thx66" (OuterVolumeSpecName: "kube-api-access-thx66") pod "91e7cf07-c909-400b-b234-8826c1181d9c" (UID: "91e7cf07-c909-400b-b234-8826c1181d9c"). InnerVolumeSpecName "kube-api-access-thx66". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.415361 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91e7cf07-c909-400b-b234-8826c1181d9c" (UID: "91e7cf07-c909-400b-b234-8826c1181d9c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.433219 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thx66\" (UniqueName: \"kubernetes.io/projected/91e7cf07-c909-400b-b234-8826c1181d9c-kube-api-access-thx66\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.433282 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7cf07-c909-400b-b234-8826c1181d9c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.664123 4809 generic.go:334] "Generic (PLEG): container finished" podID="91e7cf07-c909-400b-b234-8826c1181d9c" containerID="b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd" exitCode=0 Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.664169 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerDied","Data":"b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd"} Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.664199 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n6c2" event={"ID":"91e7cf07-c909-400b-b234-8826c1181d9c","Type":"ContainerDied","Data":"f05a66ca0278e6fb6b1aa01ce24d4f45ea7b7c1021e18f8ce0ffec7abe01bf83"} Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.664220 4809 scope.go:117] "RemoveContainer" containerID="b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.664353 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7n6c2" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.697892 4809 scope.go:117] "RemoveContainer" containerID="017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.708585 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7n6c2"] Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.737509 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7n6c2"] Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.740973 4809 scope.go:117] "RemoveContainer" containerID="92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.777166 4809 scope.go:117] "RemoveContainer" containerID="b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd" Sep 30 00:47:28 crc kubenswrapper[4809]: E0930 00:47:28.777735 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd\": container with ID starting with b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd not found: ID does not exist" containerID="b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.777882 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd"} err="failed to get container status \"b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd\": rpc error: code = NotFound desc = could not find container \"b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd\": container with ID starting with b193c0b2d8199a80e0c5c3b9cf721fc7f80d101a2fccde12eb601ac1f4c3e3bd not found: ID does not exist" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.777998 4809 scope.go:117] "RemoveContainer" containerID="017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629" Sep 30 00:47:28 crc kubenswrapper[4809]: E0930 00:47:28.778433 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629\": container with ID starting with 017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629 not found: ID does not exist" containerID="017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.778480 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629"} err="failed to get container status \"017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629\": rpc error: code = NotFound desc = could not find container \"017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629\": container with ID starting with 017153869d8f450417eae2ad1e08cb9d755b368d3428b54d839e069501291629 not found: ID does not exist" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.778509 4809 scope.go:117] "RemoveContainer" containerID="92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9" Sep 30 00:47:28 crc kubenswrapper[4809]: E0930 00:47:28.779084 4809 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9\": container with ID starting with 92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9 not found: ID does not exist" containerID="92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9" Sep 30 00:47:28 crc kubenswrapper[4809]: I0930 00:47:28.779107 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9"} err="failed to get container status \"92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9\": rpc error: code = NotFound desc = could not find container \"92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9\": container with ID starting with 92dc5901a81e26727af88a18a94b42f346f5f4a1fdf6e8e6227a6a45f726dde9 not found: ID does not exist" Sep 30 00:47:29 crc kubenswrapper[4809]: I0930 00:47:29.725816 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" path="/var/lib/kubelet/pods/91e7cf07-c909-400b-b234-8826c1181d9c/volumes" Sep 30 00:47:40 crc kubenswrapper[4809]: I0930 00:47:40.691843 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:47:40 crc kubenswrapper[4809]: E0930 00:47:40.692690 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:47:55 crc kubenswrapper[4809]: I0930 00:47:55.691980 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:47:55 crc kubenswrapper[4809]: E0930 00:47:55.693144 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:48:09 crc kubenswrapper[4809]: I0930 00:48:09.700698 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:48:09 crc kubenswrapper[4809]: E0930 00:48:09.701683 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:48:23 crc kubenswrapper[4809]: I0930 00:48:23.692777 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:48:23 crc kubenswrapper[4809]: E0930 00:48:23.694243 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:48:36 crc kubenswrapper[4809]: I0930 00:48:36.691583 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:48:36 crc kubenswrapper[4809]: E0930 00:48:36.692431 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:48:44 crc kubenswrapper[4809]: I0930 00:48:44.570053 4809 generic.go:334] "Generic (PLEG): container finished" podID="8cec48e7-b769-4dc4-9e71-14a237f8aab8" containerID="b141a3642a92ae95db9d4a6e493fe12dbd4a13a55a52fc01d81cf07fe7bf6f06" exitCode=0 Sep 30 00:48:44 crc kubenswrapper[4809]: I0930 00:48:44.570127 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" event={"ID":"8cec48e7-b769-4dc4-9e71-14a237f8aab8","Type":"ContainerDied","Data":"b141a3642a92ae95db9d4a6e493fe12dbd4a13a55a52fc01d81cf07fe7bf6f06"} Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.070324 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.205940 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-combined-ca-bundle\") pod \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.207741 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-inventory\") pod \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.207828 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-ssh-key\") pod \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.207918 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k47l\" (UniqueName: \"kubernetes.io/projected/8cec48e7-b769-4dc4-9e71-14a237f8aab8-kube-api-access-5k47l\") pod \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\" (UID: \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.208058 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-secret-0\") pod \"8cec48e7-b769-4dc4-9e71-14a237f8aab8\" (UID: 
\"8cec48e7-b769-4dc4-9e71-14a237f8aab8\") " Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.213231 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "8cec48e7-b769-4dc4-9e71-14a237f8aab8" (UID: "8cec48e7-b769-4dc4-9e71-14a237f8aab8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.215129 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cec48e7-b769-4dc4-9e71-14a237f8aab8-kube-api-access-5k47l" (OuterVolumeSpecName: "kube-api-access-5k47l") pod "8cec48e7-b769-4dc4-9e71-14a237f8aab8" (UID: "8cec48e7-b769-4dc4-9e71-14a237f8aab8"). InnerVolumeSpecName "kube-api-access-5k47l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.237224 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8cec48e7-b769-4dc4-9e71-14a237f8aab8" (UID: "8cec48e7-b769-4dc4-9e71-14a237f8aab8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.239558 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-inventory" (OuterVolumeSpecName: "inventory") pod "8cec48e7-b769-4dc4-9e71-14a237f8aab8" (UID: "8cec48e7-b769-4dc4-9e71-14a237f8aab8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.255158 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "8cec48e7-b769-4dc4-9e71-14a237f8aab8" (UID: "8cec48e7-b769-4dc4-9e71-14a237f8aab8"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.311604 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.311714 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k47l\" (UniqueName: \"kubernetes.io/projected/8cec48e7-b769-4dc4-9e71-14a237f8aab8-kube-api-access-5k47l\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.311749 4809 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.311773 4809 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.311800 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cec48e7-b769-4dc4-9e71-14a237f8aab8-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.592681 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" event={"ID":"8cec48e7-b769-4dc4-9e71-14a237f8aab8","Type":"ContainerDied","Data":"d904a6a55bc58c694629f9989f83034e36718fc571654eab8ac24e4b93219cbf"} Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.592716 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d904a6a55bc58c694629f9989f83034e36718fc571654eab8ac24e4b93219cbf" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.592766 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.705280 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5"] Sep 30 00:48:46 crc kubenswrapper[4809]: E0930 00:48:46.705828 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="extract-content" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.705853 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="extract-content" Sep 30 00:48:46 crc kubenswrapper[4809]: E0930 00:48:46.705885 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="registry-server" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.705895 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="registry-server" Sep 30 00:48:46 crc kubenswrapper[4809]: E0930 00:48:46.705970 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cec48e7-b769-4dc4-9e71-14a237f8aab8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.705980 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cec48e7-b769-4dc4-9e71-14a237f8aab8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 00:48:46 crc kubenswrapper[4809]: E0930 00:48:46.706000 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="extract-utilities" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.706011 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="extract-utilities" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.706262 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e7cf07-c909-400b-b234-8826c1181d9c" containerName="registry-server" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.706289 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cec48e7-b769-4dc4-9e71-14a237f8aab8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.707088 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.709914 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.710072 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.710358 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.712487 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720507 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720594 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjzb5\" (UniqueName: \"kubernetes.io/projected/2300c556-cca3-451c-bbf5-1f1dd17a3d41-kube-api-access-cjzb5\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720688 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720730 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720765 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720815 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: 
\"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.720891 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.731625 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.751754 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5"] Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.821832 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.821927 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.821977 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.822021 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.822077 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.822132 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.822195 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjzb5\" (UniqueName: \"kubernetes.io/projected/2300c556-cca3-451c-bbf5-1f1dd17a3d41-kube-api-access-cjzb5\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.826493 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.826628 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.826944 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.827254 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.834242 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.836134 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:46 crc kubenswrapper[4809]: I0930 00:48:46.838089 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjzb5\" (UniqueName: \"kubernetes.io/projected/2300c556-cca3-451c-bbf5-1f1dd17a3d41-kube-api-access-cjzb5\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:47 crc kubenswrapper[4809]: I0930 00:48:47.061365 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:48:47 crc kubenswrapper[4809]: I0930 00:48:47.623239 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5"] Sep 30 00:48:47 crc kubenswrapper[4809]: I0930 00:48:47.690949 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:48:47 crc kubenswrapper[4809]: E0930 00:48:47.691225 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:48:48 crc kubenswrapper[4809]: I0930 00:48:48.614062 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" event={"ID":"2300c556-cca3-451c-bbf5-1f1dd17a3d41","Type":"ContainerStarted","Data":"ea71be0e0c8811aaaf144e5c05b31203afe6efa00c7dcdf030858fb28ebbc995"} Sep 30 00:48:48 crc kubenswrapper[4809]: I0930 00:48:48.614446 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" event={"ID":"2300c556-cca3-451c-bbf5-1f1dd17a3d41","Type":"ContainerStarted","Data":"3a6a01de42a1b5364e692c5e549223e5d88934b2a0d892da1eb4a56db811d7f1"} Sep 30 00:48:48 crc kubenswrapper[4809]: I0930 00:48:48.638328 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" podStartSLOduration=2.081719295 podStartE2EDuration="2.638307635s" podCreationTimestamp="2025-09-30 00:48:46 +0000 UTC" firstStartedPulling="2025-09-30 00:48:47.61534423 +0000 UTC m=+2378.651593648" lastFinishedPulling="2025-09-30 00:48:48.17193256 +0000 UTC m=+2379.208181988" observedRunningTime="2025-09-30 00:48:48.635322382 +0000 UTC m=+2379.671571810" watchObservedRunningTime="2025-09-30 00:48:48.638307635 +0000 UTC m=+2379.674557043" Sep 30 00:48:58 crc kubenswrapper[4809]: I0930 00:48:58.691392 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:48:58 crc kubenswrapper[4809]: E0930 00:48:58.692206 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:49:11 crc kubenswrapper[4809]: I0930 00:49:11.691726 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:49:11 crc kubenswrapper[4809]: E0930 00:49:11.692806 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:49:23 crc kubenswrapper[4809]: I0930 00:49:23.691212 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:49:23 crc kubenswrapper[4809]: E0930 00:49:23.692170 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:49:36 crc kubenswrapper[4809]: I0930 00:49:36.691839 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:49:36 crc kubenswrapper[4809]: E0930 00:49:36.692782 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:49:51 crc kubenswrapper[4809]: I0930 00:49:51.691294 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:49:51 crc kubenswrapper[4809]: E0930 00:49:51.693181 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:50:03 crc kubenswrapper[4809]: I0930 00:50:03.690833 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:50:03 crc kubenswrapper[4809]: E0930 00:50:03.692722 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:50:15 crc kubenswrapper[4809]: I0930 00:50:15.691395 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:50:15 crc kubenswrapper[4809]: E0930 00:50:15.692183 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:50:29 crc kubenswrapper[4809]: I0930 00:50:29.700724 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:50:29 crc kubenswrapper[4809]: E0930 00:50:29.706336 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:50:43 crc kubenswrapper[4809]: I0930 00:50:43.690836 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:50:43 crc kubenswrapper[4809]: E0930 00:50:43.691664 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:50:57 crc kubenswrapper[4809]: I0930 00:50:57.690421 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:50:57 crc kubenswrapper[4809]: E0930 00:50:57.691916 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:51:09 crc kubenswrapper[4809]: I0930 00:51:09.699532 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:51:09 crc kubenswrapper[4809]: E0930 00:51:09.701947 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:51:20 crc kubenswrapper[4809]: I0930 00:51:20.690263 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:51:20 crc kubenswrapper[4809]: E0930 00:51:20.690988 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:51:31 crc kubenswrapper[4809]: I0930 00:51:31.691364 4809 
scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:51:32 crc kubenswrapper[4809]: I0930 00:51:32.398358 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"038b8c1d2776a46501753a414c77ba4b88fa694557870a9e8ef4fc83907e99ed"} Sep 30 00:51:37 crc kubenswrapper[4809]: I0930 00:51:37.454715 4809 generic.go:334] "Generic (PLEG): container finished" podID="2300c556-cca3-451c-bbf5-1f1dd17a3d41" containerID="ea71be0e0c8811aaaf144e5c05b31203afe6efa00c7dcdf030858fb28ebbc995" exitCode=0 Sep 30 00:51:37 crc kubenswrapper[4809]: I0930 00:51:37.454783 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" event={"ID":"2300c556-cca3-451c-bbf5-1f1dd17a3d41","Type":"ContainerDied","Data":"ea71be0e0c8811aaaf144e5c05b31203afe6efa00c7dcdf030858fb28ebbc995"} Sep 30 00:51:38 crc kubenswrapper[4809]: I0930 00:51:38.955941 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.096229 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-2\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.096514 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-telemetry-combined-ca-bundle\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.096540 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-inventory\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.096578 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-1\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.096757 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-0\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.096843 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjzb5\" (UniqueName: \"kubernetes.io/projected/2300c556-cca3-451c-bbf5-1f1dd17a3d41-kube-api-access-cjzb5\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 
00:51:39.096863 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ssh-key\") pod \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\" (UID: \"2300c556-cca3-451c-bbf5-1f1dd17a3d41\") " Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.105662 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.112864 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2300c556-cca3-451c-bbf5-1f1dd17a3d41-kube-api-access-cjzb5" (OuterVolumeSpecName: "kube-api-access-cjzb5") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "kube-api-access-cjzb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.126860 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.126880 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.134427 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.135821 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-inventory" (OuterVolumeSpecName: "inventory") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.142152 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "2300c556-cca3-451c-bbf5-1f1dd17a3d41" (UID: "2300c556-cca3-451c-bbf5-1f1dd17a3d41"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199263 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199316 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjzb5\" (UniqueName: \"kubernetes.io/projected/2300c556-cca3-451c-bbf5-1f1dd17a3d41-kube-api-access-cjzb5\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199332 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199349 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199363 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199376 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.199389 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/2300c556-cca3-451c-bbf5-1f1dd17a3d41-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.478331 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" event={"ID":"2300c556-cca3-451c-bbf5-1f1dd17a3d41","Type":"ContainerDied","Data":"3a6a01de42a1b5364e692c5e549223e5d88934b2a0d892da1eb4a56db811d7f1"} Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.478376 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a6a01de42a1b5364e692c5e549223e5d88934b2a0d892da1eb4a56db811d7f1" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.478404 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.568336 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh"] Sep 30 00:51:39 crc kubenswrapper[4809]: E0930 00:51:39.568898 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2300c556-cca3-451c-bbf5-1f1dd17a3d41" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.568923 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2300c556-cca3-451c-bbf5-1f1dd17a3d41" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.569246 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="2300c556-cca3-451c-bbf5-1f1dd17a3d41" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.571103 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.573900 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-ipmi-config-data" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.574217 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.574445 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.574720 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.575207 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.581565 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh"] Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709311 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709382 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709489 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: 
\"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709547 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709583 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709611 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w825\" (UniqueName: \"kubernetes.io/projected/92157a7e-beb8-4789-9818-325fa359b0d0-kube-api-access-5w825\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.709669 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811256 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811355 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w825\" (UniqueName: \"kubernetes.io/projected/92157a7e-beb8-4789-9818-325fa359b0d0-kube-api-access-5w825\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811445 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-inventory\") pod 
\"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811502 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811575 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811757 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.811840 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.815358 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.817625 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.818072 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.818771 
4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.819733 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.819821 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.842253 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w825\" (UniqueName: \"kubernetes.io/projected/92157a7e-beb8-4789-9818-325fa359b0d0-kube-api-access-5w825\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:39 crc kubenswrapper[4809]: I0930 00:51:39.902326 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:51:40 crc kubenswrapper[4809]: I0930 00:51:40.519175 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:51:40 crc kubenswrapper[4809]: I0930 00:51:40.528332 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh"] Sep 30 00:51:41 crc kubenswrapper[4809]: I0930 00:51:41.510244 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" event={"ID":"92157a7e-beb8-4789-9818-325fa359b0d0","Type":"ContainerStarted","Data":"be415b91b7c0ad089c80fb11dcb8700408f762609f958d9899ab9af750be6ff0"} Sep 30 00:51:41 crc kubenswrapper[4809]: I0930 00:51:41.510936 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" event={"ID":"92157a7e-beb8-4789-9818-325fa359b0d0","Type":"ContainerStarted","Data":"eb7795a5873f06472a3954d464207fb649172ccde7adafe67ef2d39d3d933faa"} Sep 30 00:51:41 crc kubenswrapper[4809]: I0930 00:51:41.537455 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" podStartSLOduration=2.056063074 podStartE2EDuration="2.537429659s" podCreationTimestamp="2025-09-30 00:51:39 +0000 UTC" firstStartedPulling="2025-09-30 00:51:40.518896332 +0000 UTC m=+2551.555145750" lastFinishedPulling="2025-09-30 00:51:41.000262907 +0000 UTC m=+2552.036512335" observedRunningTime="2025-09-30 00:51:41.526900379 +0000 UTC m=+2552.563149797" watchObservedRunningTime="2025-09-30 00:51:41.537429659 +0000 UTC m=+2552.573679107" Sep 30 00:53:55 crc kubenswrapper[4809]: I0930 00:53:55.324829 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:53:55 crc kubenswrapper[4809]: I0930 00:53:55.325597 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.279165 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6t7jh"] Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.283001 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.298398 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6t7jh"] Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.338569 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-utilities\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.338728 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-catalog-content\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.440060 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-catalog-content\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.440161 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdmzs\" (UniqueName: \"kubernetes.io/projected/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-kube-api-access-gdmzs\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.440256 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-utilities\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.440555 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-catalog-content\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.440560 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-utilities\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.543353 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdmzs\" (UniqueName: \"kubernetes.io/projected/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-kube-api-access-gdmzs\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.575360 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gdmzs\" (UniqueName: \"kubernetes.io/projected/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-kube-api-access-gdmzs\") pod \"certified-operators-6t7jh\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:58 crc kubenswrapper[4809]: I0930 00:53:58.611217 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:53:59 crc kubenswrapper[4809]: I0930 00:53:59.136976 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6t7jh"] Sep 30 00:54:00 crc kubenswrapper[4809]: I0930 00:54:00.053725 4809 generic.go:334] "Generic (PLEG): container finished" podID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerID="ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43" exitCode=0 Sep 30 00:54:00 crc kubenswrapper[4809]: I0930 00:54:00.053781 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6t7jh" event={"ID":"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608","Type":"ContainerDied","Data":"ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43"} Sep 30 00:54:00 crc kubenswrapper[4809]: I0930 00:54:00.054109 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6t7jh" event={"ID":"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608","Type":"ContainerStarted","Data":"98508730ab2499b954a9f23820e91142572fb81d8424dfcea904f5076e5fff03"} Sep 30 00:54:02 crc kubenswrapper[4809]: I0930 00:54:02.101030 4809 generic.go:334] "Generic (PLEG): container finished" podID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerID="fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797" exitCode=0 Sep 30 00:54:02 crc kubenswrapper[4809]: I0930 00:54:02.101136 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6t7jh" event={"ID":"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608","Type":"ContainerDied","Data":"fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797"} Sep 30 00:54:03 crc kubenswrapper[4809]: I0930 00:54:03.116847 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6t7jh" event={"ID":"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608","Type":"ContainerStarted","Data":"02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005"} Sep 30 00:54:03 crc kubenswrapper[4809]: I0930 00:54:03.141713 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6t7jh" podStartSLOduration=2.666241016 podStartE2EDuration="5.14169684s" podCreationTimestamp="2025-09-30 00:53:58 +0000 UTC" firstStartedPulling="2025-09-30 00:54:00.055690972 +0000 UTC m=+2691.091940400" lastFinishedPulling="2025-09-30 00:54:02.531146776 +0000 UTC m=+2693.567396224" observedRunningTime="2025-09-30 00:54:03.137292679 +0000 UTC m=+2694.173542097" watchObservedRunningTime="2025-09-30 00:54:03.14169684 +0000 UTC m=+2694.177946248" Sep 30 00:54:05 crc kubenswrapper[4809]: I0930 00:54:05.141423 4809 generic.go:334] "Generic (PLEG): container finished" podID="92157a7e-beb8-4789-9818-325fa359b0d0" containerID="be415b91b7c0ad089c80fb11dcb8700408f762609f958d9899ab9af750be6ff0" exitCode=0 Sep 30 00:54:05 crc kubenswrapper[4809]: I0930 00:54:05.141441 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" event={"ID":"92157a7e-beb8-4789-9818-325fa359b0d0","Type":"ContainerDied","Data":"be415b91b7c0ad089c80fb11dcb8700408f762609f958d9899ab9af750be6ff0"} Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.713926 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.821330 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w825\" (UniqueName: \"kubernetes.io/projected/92157a7e-beb8-4789-9818-325fa359b0d0-kube-api-access-5w825\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.822027 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-telemetry-power-monitoring-combined-ca-bundle\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.822101 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ssh-key\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.822154 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-2\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.822188 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-inventory\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.822225 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-0\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.822277 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-1\") pod \"92157a7e-beb8-4789-9818-325fa359b0d0\" (UID: \"92157a7e-beb8-4789-9818-325fa359b0d0\") " Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.830471 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92157a7e-beb8-4789-9818-325fa359b0d0-kube-api-access-5w825" (OuterVolumeSpecName: "kube-api-access-5w825") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "kube-api-access-5w825". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.831336 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-telemetry-power-monitoring-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-power-monitoring-combined-ca-bundle") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "telemetry-power-monitoring-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.865066 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.865481 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-2" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-2") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "ceilometer-ipmi-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.873283 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-inventory" (OuterVolumeSpecName: "inventory") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.884452 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-1" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-1") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "ceilometer-ipmi-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.887600 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-0" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-0") pod "92157a7e-beb8-4789-9818-325fa359b0d0" (UID: "92157a7e-beb8-4789-9818-325fa359b0d0"). InnerVolumeSpecName "ceilometer-ipmi-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925706 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-telemetry-power-monitoring-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925750 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925769 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925788 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925807 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925823 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/92157a7e-beb8-4789-9818-325fa359b0d0-ceilometer-ipmi-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:06 crc kubenswrapper[4809]: I0930 00:54:06.925844 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w825\" (UniqueName: \"kubernetes.io/projected/92157a7e-beb8-4789-9818-325fa359b0d0-kube-api-access-5w825\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.172018 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" event={"ID":"92157a7e-beb8-4789-9818-325fa359b0d0","Type":"ContainerDied","Data":"eb7795a5873f06472a3954d464207fb649172ccde7adafe67ef2d39d3d933faa"} Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.172082 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb7795a5873f06472a3954d464207fb649172ccde7adafe67ef2d39d3d933faa" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.172603 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.281821 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz"] Sep 30 00:54:07 crc kubenswrapper[4809]: E0930 00:54:07.282598 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92157a7e-beb8-4789-9818-325fa359b0d0" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.282655 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="92157a7e-beb8-4789-9818-325fa359b0d0" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.283006 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="92157a7e-beb8-4789-9818-325fa359b0d0" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.284406 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.287553 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.288037 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"logging-compute-config-data" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.288342 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.288731 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.289918 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.300286 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz"] Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.439969 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.440024 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.440053 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dg42\" (UniqueName: \"kubernetes.io/projected/ca0c759d-ee70-41e7-b36d-0e1652aa1021-kube-api-access-6dg42\") 
pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.440159 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.440238 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.542967 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.543234 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.543276 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.543314 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dg42\" (UniqueName: \"kubernetes.io/projected/ca0c759d-ee70-41e7-b36d-0e1652aa1021-kube-api-access-6dg42\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.543400 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.548537 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: 
\"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.549268 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.549728 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.550602 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.574529 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dg42\" (UniqueName: \"kubernetes.io/projected/ca0c759d-ee70-41e7-b36d-0e1652aa1021-kube-api-access-6dg42\") pod \"logging-edpm-deployment-openstack-edpm-ipam-ds6tz\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:07 crc kubenswrapper[4809]: I0930 00:54:07.612475 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:08 crc kubenswrapper[4809]: I0930 00:54:08.611528 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:54:08 crc kubenswrapper[4809]: I0930 00:54:08.614073 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:54:08 crc kubenswrapper[4809]: I0930 00:54:08.695363 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:54:08 crc kubenswrapper[4809]: I0930 00:54:08.779218 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz"] Sep 30 00:54:08 crc kubenswrapper[4809]: W0930 00:54:08.783460 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca0c759d_ee70_41e7_b36d_0e1652aa1021.slice/crio-93520830796ea0e1d9aaa3e5f7bcabaf6bb54db45ca87efac7ecf81122762590 WatchSource:0}: Error finding container 93520830796ea0e1d9aaa3e5f7bcabaf6bb54db45ca87efac7ecf81122762590: Status 404 returned error can't find the container with id 93520830796ea0e1d9aaa3e5f7bcabaf6bb54db45ca87efac7ecf81122762590 Sep 30 00:54:09 crc kubenswrapper[4809]: I0930 00:54:09.204734 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" event={"ID":"ca0c759d-ee70-41e7-b36d-0e1652aa1021","Type":"ContainerStarted","Data":"93520830796ea0e1d9aaa3e5f7bcabaf6bb54db45ca87efac7ecf81122762590"} Sep 30 00:54:09 crc kubenswrapper[4809]: I0930 00:54:09.300095 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:54:09 crc kubenswrapper[4809]: I0930 00:54:09.355778 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6t7jh"] Sep 30 00:54:10 crc kubenswrapper[4809]: I0930 00:54:10.220132 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" event={"ID":"ca0c759d-ee70-41e7-b36d-0e1652aa1021","Type":"ContainerStarted","Data":"faf71e3d365cb268d95dd97f415faf5e8f6ad18c7314fa6bb12c5f7024540c75"} Sep 30 00:54:10 crc kubenswrapper[4809]: I0930 00:54:10.262403 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" podStartSLOduration=2.789259271 podStartE2EDuration="3.262372818s" podCreationTimestamp="2025-09-30 00:54:07 +0000 UTC" firstStartedPulling="2025-09-30 00:54:08.786835558 +0000 UTC m=+2699.823084976" lastFinishedPulling="2025-09-30 00:54:09.259949075 +0000 UTC m=+2700.296198523" observedRunningTime="2025-09-30 00:54:10.252247018 +0000 UTC m=+2701.288496436" watchObservedRunningTime="2025-09-30 00:54:10.262372818 +0000 UTC m=+2701.298622266" Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.227079 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6t7jh" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="registry-server" containerID="cri-o://02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005" gracePeriod=2 Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.789000 4809 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.946870 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-catalog-content\") pod \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.947558 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdmzs\" (UniqueName: \"kubernetes.io/projected/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-kube-api-access-gdmzs\") pod \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.947704 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-utilities\") pod \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\" (UID: \"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608\") " Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.948522 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-utilities" (OuterVolumeSpecName: "utilities") pod "f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" (UID: "f4481a0e-a8f1-4a28-9b72-3b9fcbe50608"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:54:11 crc kubenswrapper[4809]: I0930 00:54:11.953030 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-kube-api-access-gdmzs" (OuterVolumeSpecName: "kube-api-access-gdmzs") pod "f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" (UID: "f4481a0e-a8f1-4a28-9b72-3b9fcbe50608"). InnerVolumeSpecName "kube-api-access-gdmzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.004279 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" (UID: "f4481a0e-a8f1-4a28-9b72-3b9fcbe50608"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.050909 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdmzs\" (UniqueName: \"kubernetes.io/projected/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-kube-api-access-gdmzs\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.050941 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.050951 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.239764 4809 generic.go:334] "Generic (PLEG): container finished" podID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerID="02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005" exitCode=0 Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.239828 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6t7jh" event={"ID":"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608","Type":"ContainerDied","Data":"02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005"} Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.239858 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6t7jh" event={"ID":"f4481a0e-a8f1-4a28-9b72-3b9fcbe50608","Type":"ContainerDied","Data":"98508730ab2499b954a9f23820e91142572fb81d8424dfcea904f5076e5fff03"} Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.239879 4809 scope.go:117] "RemoveContainer" containerID="02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.240123 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6t7jh" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.275177 4809 scope.go:117] "RemoveContainer" containerID="fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.288272 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6t7jh"] Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.302739 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6t7jh"] Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.302809 4809 scope.go:117] "RemoveContainer" containerID="ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.392673 4809 scope.go:117] "RemoveContainer" containerID="02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005" Sep 30 00:54:12 crc kubenswrapper[4809]: E0930 00:54:12.393077 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005\": container with ID starting with 02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005 not found: ID does not exist" containerID="02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.393141 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005"} err="failed to get container status \"02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005\": rpc error: code = NotFound desc = could not find container \"02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005\": container with ID starting with 02978f4db3d4f047b38c57eb5bd5172976a8575372c8d62e528cfe0f578dc005 not found: ID does not exist" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.393172 4809 scope.go:117] "RemoveContainer" containerID="fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797" Sep 30 00:54:12 crc kubenswrapper[4809]: E0930 00:54:12.393449 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797\": container with ID starting with fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797 not found: ID does not exist" containerID="fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.393479 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797"} err="failed to get container status \"fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797\": rpc error: code = NotFound desc = could not find container \"fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797\": container with ID starting with fcd66d44f91205aa429fda1bb80c56a32d4c0f4bcdfd3902ed53aa66f1301797 not found: ID does not exist" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.393501 4809 scope.go:117] "RemoveContainer" containerID="ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43" Sep 30 00:54:12 crc kubenswrapper[4809]: E0930 00:54:12.393770 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43\": container with ID starting with ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43 not found: ID does not exist" containerID="ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43" Sep 30 00:54:12 crc kubenswrapper[4809]: I0930 00:54:12.393827 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43"} err="failed to get container status \"ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43\": rpc error: code = NotFound desc = could not find container \"ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43\": container with ID starting with ac2a9f2329f45a01ee72d00bb7d8aaf15524aa585e5e3ae7359032c64f8f8e43 not found: ID does not exist" Sep 30 00:54:13 crc kubenswrapper[4809]: I0930 00:54:13.714095 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" path="/var/lib/kubelet/pods/f4481a0e-a8f1-4a28-9b72-3b9fcbe50608/volumes" Sep 30 00:54:25 crc kubenswrapper[4809]: I0930 00:54:25.324696 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:54:25 crc kubenswrapper[4809]: I0930 00:54:25.325231 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:54:30 crc kubenswrapper[4809]: I0930 00:54:30.479820 4809 generic.go:334] "Generic (PLEG): container finished" podID="ca0c759d-ee70-41e7-b36d-0e1652aa1021" containerID="faf71e3d365cb268d95dd97f415faf5e8f6ad18c7314fa6bb12c5f7024540c75" exitCode=0 Sep 30 00:54:30 crc kubenswrapper[4809]: I0930 00:54:30.479883 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" event={"ID":"ca0c759d-ee70-41e7-b36d-0e1652aa1021","Type":"ContainerDied","Data":"faf71e3d365cb268d95dd97f415faf5e8f6ad18c7314fa6bb12c5f7024540c75"} Sep 30 00:54:31 crc kubenswrapper[4809]: I0930 00:54:31.936534 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.013938 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-ssh-key\") pod \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.014092 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-0\") pod \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.014220 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-1\") pod \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.014296 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dg42\" (UniqueName: \"kubernetes.io/projected/ca0c759d-ee70-41e7-b36d-0e1652aa1021-kube-api-access-6dg42\") pod \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.014329 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-inventory\") pod \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\" (UID: \"ca0c759d-ee70-41e7-b36d-0e1652aa1021\") " Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.021618 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca0c759d-ee70-41e7-b36d-0e1652aa1021-kube-api-access-6dg42" (OuterVolumeSpecName: "kube-api-access-6dg42") pod "ca0c759d-ee70-41e7-b36d-0e1652aa1021" (UID: "ca0c759d-ee70-41e7-b36d-0e1652aa1021"). InnerVolumeSpecName "kube-api-access-6dg42". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.044979 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-inventory" (OuterVolumeSpecName: "inventory") pod "ca0c759d-ee70-41e7-b36d-0e1652aa1021" (UID: "ca0c759d-ee70-41e7-b36d-0e1652aa1021"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.046010 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ca0c759d-ee70-41e7-b36d-0e1652aa1021" (UID: "ca0c759d-ee70-41e7-b36d-0e1652aa1021"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.048660 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-0" (OuterVolumeSpecName: "logging-compute-config-data-0") pod "ca0c759d-ee70-41e7-b36d-0e1652aa1021" (UID: "ca0c759d-ee70-41e7-b36d-0e1652aa1021"). InnerVolumeSpecName "logging-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.058601 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-1" (OuterVolumeSpecName: "logging-compute-config-data-1") pod "ca0c759d-ee70-41e7-b36d-0e1652aa1021" (UID: "ca0c759d-ee70-41e7-b36d-0e1652aa1021"). InnerVolumeSpecName "logging-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.116745 4809 reconciler_common.go:293] "Volume detached for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.116789 4809 reconciler_common.go:293] "Volume detached for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-logging-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.116804 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dg42\" (UniqueName: \"kubernetes.io/projected/ca0c759d-ee70-41e7-b36d-0e1652aa1021-kube-api-access-6dg42\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.116822 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.116835 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca0c759d-ee70-41e7-b36d-0e1652aa1021-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.509184 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" event={"ID":"ca0c759d-ee70-41e7-b36d-0e1652aa1021","Type":"ContainerDied","Data":"93520830796ea0e1d9aaa3e5f7bcabaf6bb54db45ca87efac7ecf81122762590"} Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.509259 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93520830796ea0e1d9aaa3e5f7bcabaf6bb54db45ca87efac7ecf81122762590" Sep 30 00:54:32 crc kubenswrapper[4809]: I0930 00:54:32.509277 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz" Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.325460 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.326101 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.326166 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.327081 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"038b8c1d2776a46501753a414c77ba4b88fa694557870a9e8ef4fc83907e99ed"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.327152 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://038b8c1d2776a46501753a414c77ba4b88fa694557870a9e8ef4fc83907e99ed" gracePeriod=600 Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.784359 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="038b8c1d2776a46501753a414c77ba4b88fa694557870a9e8ef4fc83907e99ed" exitCode=0 Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.784440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"038b8c1d2776a46501753a414c77ba4b88fa694557870a9e8ef4fc83907e99ed"} Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.784816 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9"} Sep 30 00:54:55 crc kubenswrapper[4809]: I0930 00:54:55.784841 4809 scope.go:117] "RemoveContainer" containerID="33dad9eebe82e88ffd7356d3072034ead99610910d3e261e2a663ad7398b8f1b" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.206442 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ncmpj"] Sep 30 00:55:09 crc kubenswrapper[4809]: E0930 00:55:09.208292 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca0c759d-ee70-41e7-b36d-0e1652aa1021" containerName="logging-edpm-deployment-openstack-edpm-ipam" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.208360 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca0c759d-ee70-41e7-b36d-0e1652aa1021" 
containerName="logging-edpm-deployment-openstack-edpm-ipam" Sep 30 00:55:09 crc kubenswrapper[4809]: E0930 00:55:09.208428 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="extract-utilities" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.208478 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="extract-utilities" Sep 30 00:55:09 crc kubenswrapper[4809]: E0930 00:55:09.208527 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="extract-content" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.208576 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="extract-content" Sep 30 00:55:09 crc kubenswrapper[4809]: E0930 00:55:09.208651 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="registry-server" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.208709 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="registry-server" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.209008 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca0c759d-ee70-41e7-b36d-0e1652aa1021" containerName="logging-edpm-deployment-openstack-edpm-ipam" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.209081 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4481a0e-a8f1-4a28-9b72-3b9fcbe50608" containerName="registry-server" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.210726 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.230744 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ncmpj"] Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.364923 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctqbd\" (UniqueName: \"kubernetes.io/projected/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-kube-api-access-ctqbd\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.365051 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-catalog-content\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.365105 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-utilities\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.468072 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctqbd\" (UniqueName: \"kubernetes.io/projected/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-kube-api-access-ctqbd\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.468252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-catalog-content\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.468317 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-utilities\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.469137 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-utilities\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.469150 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-catalog-content\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.509612 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ctqbd\" (UniqueName: \"kubernetes.io/projected/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-kube-api-access-ctqbd\") pod \"redhat-marketplace-ncmpj\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:09 crc kubenswrapper[4809]: I0930 00:55:09.532017 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:10 crc kubenswrapper[4809]: I0930 00:55:10.031607 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ncmpj"] Sep 30 00:55:10 crc kubenswrapper[4809]: W0930 00:55:10.036307 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fc2d6d6_ae6b_4dc2_9445_1c9d493971aa.slice/crio-b4efe1942438cadf81419701ac54e376f6f48b2aecfb040f54dc268e1c8cd1c4 WatchSource:0}: Error finding container b4efe1942438cadf81419701ac54e376f6f48b2aecfb040f54dc268e1c8cd1c4: Status 404 returned error can't find the container with id b4efe1942438cadf81419701ac54e376f6f48b2aecfb040f54dc268e1c8cd1c4 Sep 30 00:55:10 crc kubenswrapper[4809]: I0930 00:55:10.967404 4809 generic.go:334] "Generic (PLEG): container finished" podID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerID="f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3" exitCode=0 Sep 30 00:55:10 crc kubenswrapper[4809]: I0930 00:55:10.967484 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerDied","Data":"f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3"} Sep 30 00:55:10 crc kubenswrapper[4809]: I0930 00:55:10.967771 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerStarted","Data":"b4efe1942438cadf81419701ac54e376f6f48b2aecfb040f54dc268e1c8cd1c4"} Sep 30 00:55:11 crc kubenswrapper[4809]: I0930 00:55:11.982810 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerStarted","Data":"c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2"} Sep 30 00:55:12 crc kubenswrapper[4809]: I0930 00:55:12.994260 4809 generic.go:334] "Generic (PLEG): container finished" podID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerID="c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2" exitCode=0 Sep 30 00:55:12 crc kubenswrapper[4809]: I0930 00:55:12.994367 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerDied","Data":"c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2"} Sep 30 00:55:14 crc kubenswrapper[4809]: I0930 00:55:14.006809 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerStarted","Data":"43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25"} Sep 30 00:55:14 crc kubenswrapper[4809]: I0930 00:55:14.025488 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ncmpj" podStartSLOduration=2.5226764470000003 
podStartE2EDuration="5.025468845s" podCreationTimestamp="2025-09-30 00:55:09 +0000 UTC" firstStartedPulling="2025-09-30 00:55:10.970248185 +0000 UTC m=+2762.006497593" lastFinishedPulling="2025-09-30 00:55:13.473040543 +0000 UTC m=+2764.509289991" observedRunningTime="2025-09-30 00:55:14.023966803 +0000 UTC m=+2765.060216221" watchObservedRunningTime="2025-09-30 00:55:14.025468845 +0000 UTC m=+2765.061718263" Sep 30 00:55:19 crc kubenswrapper[4809]: I0930 00:55:19.532819 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:19 crc kubenswrapper[4809]: I0930 00:55:19.533131 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:19 crc kubenswrapper[4809]: I0930 00:55:19.586156 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:20 crc kubenswrapper[4809]: I0930 00:55:20.122286 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:21 crc kubenswrapper[4809]: I0930 00:55:21.786381 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ncmpj"] Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.086632 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ncmpj" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="registry-server" containerID="cri-o://43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25" gracePeriod=2 Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.599440 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.659175 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-utilities\") pod \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.659340 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-catalog-content\") pod \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.659427 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctqbd\" (UniqueName: \"kubernetes.io/projected/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-kube-api-access-ctqbd\") pod \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\" (UID: \"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa\") " Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.668883 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-kube-api-access-ctqbd" (OuterVolumeSpecName: "kube-api-access-ctqbd") pod "6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" (UID: "6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa"). InnerVolumeSpecName "kube-api-access-ctqbd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.675569 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-utilities" (OuterVolumeSpecName: "utilities") pod "6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" (UID: "6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.677461 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" (UID: "6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.762211 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctqbd\" (UniqueName: \"kubernetes.io/projected/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-kube-api-access-ctqbd\") on node \"crc\" DevicePath \"\"" Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.762261 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:55:22 crc kubenswrapper[4809]: I0930 00:55:22.762278 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.102439 4809 generic.go:334] "Generic (PLEG): container finished" podID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerID="43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25" exitCode=0 Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.103485 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerDied","Data":"43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25"} Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.103607 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ncmpj" event={"ID":"6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa","Type":"ContainerDied","Data":"b4efe1942438cadf81419701ac54e376f6f48b2aecfb040f54dc268e1c8cd1c4"} Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.103782 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ncmpj" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.103797 4809 scope.go:117] "RemoveContainer" containerID="43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.137630 4809 scope.go:117] "RemoveContainer" containerID="c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.197721 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ncmpj"] Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.206379 4809 scope.go:117] "RemoveContainer" containerID="f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.221046 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ncmpj"] Sep 30 00:55:23 crc kubenswrapper[4809]: E0930 00:55:23.221726 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fc2d6d6_ae6b_4dc2_9445_1c9d493971aa.slice\": RecentStats: unable to find data in memory cache]" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.237819 4809 scope.go:117] "RemoveContainer" containerID="43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25" Sep 30 00:55:23 crc kubenswrapper[4809]: E0930 00:55:23.238281 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25\": container with ID starting with 43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25 not found: ID does not exist" containerID="43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.238320 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25"} err="failed to get container status \"43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25\": rpc error: code = NotFound desc = could not find container \"43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25\": container with ID starting with 43fb50409117781743746aea1afcd0cce616d5c4379d8271d701053a8e3d0c25 not found: ID does not exist" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.238345 4809 scope.go:117] "RemoveContainer" containerID="c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2" Sep 30 00:55:23 crc kubenswrapper[4809]: E0930 00:55:23.238732 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2\": container with ID starting with c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2 not found: ID does not exist" containerID="c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.238756 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2"} err="failed to get container status \"c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2\": rpc error: code = NotFound desc = could 
not find container \"c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2\": container with ID starting with c3671eb16af64dad661a2d09cb34f4888cf050acc924326c7c3f742b981a40a2 not found: ID does not exist" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.238771 4809 scope.go:117] "RemoveContainer" containerID="f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3" Sep 30 00:55:23 crc kubenswrapper[4809]: E0930 00:55:23.239805 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3\": container with ID starting with f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3 not found: ID does not exist" containerID="f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.239826 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3"} err="failed to get container status \"f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3\": rpc error: code = NotFound desc = could not find container \"f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3\": container with ID starting with f92bdec5b5190d405a6bb6531a67debfae9f3b8784f50799766970378d95c2a3 not found: ID does not exist" Sep 30 00:55:23 crc kubenswrapper[4809]: I0930 00:55:23.706884 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" path="/var/lib/kubelet/pods/6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa/volumes" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.254380 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2hl7f"] Sep 30 00:56:40 crc kubenswrapper[4809]: E0930 00:56:40.255501 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="extract-content" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.255519 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="extract-content" Sep 30 00:56:40 crc kubenswrapper[4809]: E0930 00:56:40.255548 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="registry-server" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.255559 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="registry-server" Sep 30 00:56:40 crc kubenswrapper[4809]: E0930 00:56:40.255607 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="extract-utilities" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.255615 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="extract-utilities" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.255914 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fc2d6d6-ae6b-4dc2-9445-1c9d493971aa" containerName="registry-server" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.257836 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.292894 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2hl7f"] Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.329001 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nh57\" (UniqueName: \"kubernetes.io/projected/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-kube-api-access-5nh57\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.329242 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-catalog-content\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.329291 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-utilities\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.431464 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-catalog-content\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.431535 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-utilities\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.431695 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nh57\" (UniqueName: \"kubernetes.io/projected/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-kube-api-access-5nh57\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.432100 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-utilities\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.432334 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-catalog-content\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.452366 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5nh57\" (UniqueName: \"kubernetes.io/projected/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-kube-api-access-5nh57\") pod \"community-operators-2hl7f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:40 crc kubenswrapper[4809]: I0930 00:56:40.588223 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:41 crc kubenswrapper[4809]: I0930 00:56:41.185994 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2hl7f"] Sep 30 00:56:41 crc kubenswrapper[4809]: W0930 00:56:41.193594 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6d8ad5d_0c82_4ff1_9b66_729e64fa1c4f.slice/crio-b6a24e7f21b46a65cad8e13645789277e5b22a39776ee6b0952398d81d638f23 WatchSource:0}: Error finding container b6a24e7f21b46a65cad8e13645789277e5b22a39776ee6b0952398d81d638f23: Status 404 returned error can't find the container with id b6a24e7f21b46a65cad8e13645789277e5b22a39776ee6b0952398d81d638f23 Sep 30 00:56:42 crc kubenswrapper[4809]: I0930 00:56:42.150011 4809 generic.go:334] "Generic (PLEG): container finished" podID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerID="5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520" exitCode=0 Sep 30 00:56:42 crc kubenswrapper[4809]: I0930 00:56:42.150081 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerDied","Data":"5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520"} Sep 30 00:56:42 crc kubenswrapper[4809]: I0930 00:56:42.150400 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerStarted","Data":"b6a24e7f21b46a65cad8e13645789277e5b22a39776ee6b0952398d81d638f23"} Sep 30 00:56:42 crc kubenswrapper[4809]: I0930 00:56:42.152283 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:56:43 crc kubenswrapper[4809]: I0930 00:56:43.167985 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerStarted","Data":"87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1"} Sep 30 00:56:45 crc kubenswrapper[4809]: I0930 00:56:45.189502 4809 generic.go:334] "Generic (PLEG): container finished" podID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerID="87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1" exitCode=0 Sep 30 00:56:45 crc kubenswrapper[4809]: I0930 00:56:45.189585 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerDied","Data":"87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1"} Sep 30 00:56:46 crc kubenswrapper[4809]: I0930 00:56:46.202774 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerStarted","Data":"c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc"} Sep 30 00:56:46 crc kubenswrapper[4809]: I0930 
00:56:46.250118 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2hl7f" podStartSLOduration=2.793645527 podStartE2EDuration="6.250093444s" podCreationTimestamp="2025-09-30 00:56:40 +0000 UTC" firstStartedPulling="2025-09-30 00:56:42.15196633 +0000 UTC m=+2853.188215748" lastFinishedPulling="2025-09-30 00:56:45.608414257 +0000 UTC m=+2856.644663665" observedRunningTime="2025-09-30 00:56:46.233968341 +0000 UTC m=+2857.270217739" watchObservedRunningTime="2025-09-30 00:56:46.250093444 +0000 UTC m=+2857.286342862" Sep 30 00:56:50 crc kubenswrapper[4809]: I0930 00:56:50.588765 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:50 crc kubenswrapper[4809]: I0930 00:56:50.589487 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:50 crc kubenswrapper[4809]: I0930 00:56:50.671563 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:51 crc kubenswrapper[4809]: I0930 00:56:51.326887 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:51 crc kubenswrapper[4809]: I0930 00:56:51.411108 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2hl7f"] Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.290592 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2hl7f" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="registry-server" containerID="cri-o://c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc" gracePeriod=2 Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.810469 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.873941 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nh57\" (UniqueName: \"kubernetes.io/projected/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-kube-api-access-5nh57\") pod \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.874024 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-catalog-content\") pod \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.874076 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-utilities\") pod \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\" (UID: \"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f\") " Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.874971 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-utilities" (OuterVolumeSpecName: "utilities") pod "e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" (UID: "e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.880667 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-kube-api-access-5nh57" (OuterVolumeSpecName: "kube-api-access-5nh57") pod "e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" (UID: "e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f"). InnerVolumeSpecName "kube-api-access-5nh57". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.930787 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" (UID: "e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.977081 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nh57\" (UniqueName: \"kubernetes.io/projected/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-kube-api-access-5nh57\") on node \"crc\" DevicePath \"\"" Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.977117 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:56:53 crc kubenswrapper[4809]: I0930 00:56:53.977126 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.306560 4809 generic.go:334] "Generic (PLEG): container finished" podID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerID="c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc" exitCode=0 Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.306714 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2hl7f" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.306619 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerDied","Data":"c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc"} Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.306815 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hl7f" event={"ID":"e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f","Type":"ContainerDied","Data":"b6a24e7f21b46a65cad8e13645789277e5b22a39776ee6b0952398d81d638f23"} Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.306857 4809 scope.go:117] "RemoveContainer" containerID="c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.348690 4809 scope.go:117] "RemoveContainer" containerID="87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.367187 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2hl7f"] Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.392092 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2hl7f"] Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.396215 4809 scope.go:117] "RemoveContainer" containerID="5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.476231 4809 scope.go:117] "RemoveContainer" containerID="c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc" Sep 30 00:56:54 crc kubenswrapper[4809]: E0930 00:56:54.476785 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc\": container with ID starting with c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc not found: ID does not exist" containerID="c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.476829 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc"} err="failed to get container status \"c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc\": rpc error: code = NotFound desc = could not find container \"c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc\": container with ID starting with c546f2e346473e10062b535a8ff73ae7421b6adeca7fd37dcb11dc8b353e77bc not found: ID does not exist" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.476855 4809 scope.go:117] "RemoveContainer" containerID="87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1" Sep 30 00:56:54 crc kubenswrapper[4809]: E0930 00:56:54.477277 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1\": container with ID starting with 87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1 not found: ID does not exist" containerID="87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.477343 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1"} err="failed to get container status \"87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1\": rpc error: code = NotFound desc = could not find container \"87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1\": container with ID starting with 87747e957aedb2602100dd16e8210120d0360644186f9df8436ed994b78530f1 not found: ID does not exist" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.477378 4809 scope.go:117] "RemoveContainer" containerID="5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520" Sep 30 00:56:54 crc kubenswrapper[4809]: E0930 00:56:54.478105 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520\": container with ID starting with 5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520 not found: ID does not exist" containerID="5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520" Sep 30 00:56:54 crc kubenswrapper[4809]: I0930 00:56:54.478141 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520"} err="failed to get container status \"5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520\": rpc error: code = NotFound desc = could not find container \"5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520\": container with ID starting with 5daa8b63de52b098208816a9b5a5a9e989e4f3447a2d64bf1120895e23fcc520 not found: ID does not exist" Sep 30 00:56:55 crc kubenswrapper[4809]: I0930 00:56:55.324925 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:56:55 crc kubenswrapper[4809]: I0930 00:56:55.325585 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:56:55 crc kubenswrapper[4809]: I0930 00:56:55.706705 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" path="/var/lib/kubelet/pods/e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f/volumes" Sep 30 00:57:25 crc kubenswrapper[4809]: I0930 00:57:25.324583 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:57:25 crc kubenswrapper[4809]: I0930 00:57:25.325193 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 
00:57:43.230520 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h9zs4"] Sep 30 00:57:43 crc kubenswrapper[4809]: E0930 00:57:43.231302 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="registry-server" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.231314 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="registry-server" Sep 30 00:57:43 crc kubenswrapper[4809]: E0930 00:57:43.231341 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="extract-utilities" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.231347 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="extract-utilities" Sep 30 00:57:43 crc kubenswrapper[4809]: E0930 00:57:43.231371 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="extract-content" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.231377 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="extract-content" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.231581 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6d8ad5d-0c82-4ff1-9b66-729e64fa1c4f" containerName="registry-server" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.233020 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.258401 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h9zs4"] Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.297190 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gfxp\" (UniqueName: \"kubernetes.io/projected/69455890-6e14-45bf-bff7-2848b454c077-kube-api-access-6gfxp\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.297253 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-catalog-content\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.297382 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-utilities\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.398987 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gfxp\" (UniqueName: \"kubernetes.io/projected/69455890-6e14-45bf-bff7-2848b454c077-kube-api-access-6gfxp\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: 
I0930 00:57:43.399044 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-catalog-content\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.399098 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-utilities\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.399742 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-catalog-content\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.399834 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-utilities\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.422364 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gfxp\" (UniqueName: \"kubernetes.io/projected/69455890-6e14-45bf-bff7-2848b454c077-kube-api-access-6gfxp\") pod \"redhat-operators-h9zs4\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:43 crc kubenswrapper[4809]: I0930 00:57:43.561223 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:44 crc kubenswrapper[4809]: I0930 00:57:44.061718 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h9zs4"] Sep 30 00:57:44 crc kubenswrapper[4809]: I0930 00:57:44.928471 4809 generic.go:334] "Generic (PLEG): container finished" podID="69455890-6e14-45bf-bff7-2848b454c077" containerID="9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb" exitCode=0 Sep 30 00:57:44 crc kubenswrapper[4809]: I0930 00:57:44.928604 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerDied","Data":"9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb"} Sep 30 00:57:44 crc kubenswrapper[4809]: I0930 00:57:44.929024 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerStarted","Data":"039ad8729eafe0357ddcd9668504e33b60287b1c6a577a61a54c16392e1d6d41"} Sep 30 00:57:46 crc kubenswrapper[4809]: I0930 00:57:46.959584 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerStarted","Data":"638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643"} Sep 30 00:57:50 crc kubenswrapper[4809]: I0930 00:57:50.004784 4809 generic.go:334] "Generic (PLEG): container finished" podID="69455890-6e14-45bf-bff7-2848b454c077" containerID="638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643" exitCode=0 Sep 30 00:57:50 crc kubenswrapper[4809]: I0930 00:57:50.005782 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerDied","Data":"638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643"} Sep 30 00:57:51 crc kubenswrapper[4809]: I0930 00:57:51.020619 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerStarted","Data":"b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59"} Sep 30 00:57:51 crc kubenswrapper[4809]: I0930 00:57:51.058339 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h9zs4" podStartSLOduration=2.526267767 podStartE2EDuration="8.058317614s" podCreationTimestamp="2025-09-30 00:57:43 +0000 UTC" firstStartedPulling="2025-09-30 00:57:44.93293036 +0000 UTC m=+2915.969179768" lastFinishedPulling="2025-09-30 00:57:50.464980167 +0000 UTC m=+2921.501229615" observedRunningTime="2025-09-30 00:57:51.050976503 +0000 UTC m=+2922.087225921" watchObservedRunningTime="2025-09-30 00:57:51.058317614 +0000 UTC m=+2922.094567032" Sep 30 00:57:53 crc kubenswrapper[4809]: I0930 00:57:53.562240 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:53 crc kubenswrapper[4809]: I0930 00:57:53.562622 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:57:54 crc kubenswrapper[4809]: I0930 00:57:54.630080 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h9zs4" 
podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="registry-server" probeResult="failure" output=< Sep 30 00:57:54 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 00:57:54 crc kubenswrapper[4809]: > Sep 30 00:57:55 crc kubenswrapper[4809]: I0930 00:57:55.324885 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:57:55 crc kubenswrapper[4809]: I0930 00:57:55.325000 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:57:55 crc kubenswrapper[4809]: I0930 00:57:55.325070 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 00:57:55 crc kubenswrapper[4809]: I0930 00:57:55.326054 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:57:55 crc kubenswrapper[4809]: I0930 00:57:55.326127 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" gracePeriod=600 Sep 30 00:57:55 crc kubenswrapper[4809]: E0930 00:57:55.474993 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:57:56 crc kubenswrapper[4809]: I0930 00:57:56.086109 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" exitCode=0 Sep 30 00:57:56 crc kubenswrapper[4809]: I0930 00:57:56.086183 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9"} Sep 30 00:57:56 crc kubenswrapper[4809]: I0930 00:57:56.086243 4809 scope.go:117] "RemoveContainer" containerID="038b8c1d2776a46501753a414c77ba4b88fa694557870a9e8ef4fc83907e99ed" Sep 30 00:57:56 crc kubenswrapper[4809]: I0930 00:57:56.087852 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:57:56 crc kubenswrapper[4809]: E0930 00:57:56.088767 4809 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:58:03 crc kubenswrapper[4809]: I0930 00:58:03.623979 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:58:03 crc kubenswrapper[4809]: I0930 00:58:03.703999 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:58:03 crc kubenswrapper[4809]: I0930 00:58:03.879902 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h9zs4"] Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.205782 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h9zs4" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="registry-server" containerID="cri-o://b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59" gracePeriod=2 Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.777900 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.940987 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gfxp\" (UniqueName: \"kubernetes.io/projected/69455890-6e14-45bf-bff7-2848b454c077-kube-api-access-6gfxp\") pod \"69455890-6e14-45bf-bff7-2848b454c077\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.941102 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-catalog-content\") pod \"69455890-6e14-45bf-bff7-2848b454c077\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.941275 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-utilities\") pod \"69455890-6e14-45bf-bff7-2848b454c077\" (UID: \"69455890-6e14-45bf-bff7-2848b454c077\") " Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.942235 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-utilities" (OuterVolumeSpecName: "utilities") pod "69455890-6e14-45bf-bff7-2848b454c077" (UID: "69455890-6e14-45bf-bff7-2848b454c077"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:58:05 crc kubenswrapper[4809]: I0930 00:58:05.948951 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69455890-6e14-45bf-bff7-2848b454c077-kube-api-access-6gfxp" (OuterVolumeSpecName: "kube-api-access-6gfxp") pod "69455890-6e14-45bf-bff7-2848b454c077" (UID: "69455890-6e14-45bf-bff7-2848b454c077"). InnerVolumeSpecName "kube-api-access-6gfxp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.043163 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.043196 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gfxp\" (UniqueName: \"kubernetes.io/projected/69455890-6e14-45bf-bff7-2848b454c077-kube-api-access-6gfxp\") on node \"crc\" DevicePath \"\"" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.058278 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69455890-6e14-45bf-bff7-2848b454c077" (UID: "69455890-6e14-45bf-bff7-2848b454c077"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.145415 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69455890-6e14-45bf-bff7-2848b454c077-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.219867 4809 generic.go:334] "Generic (PLEG): container finished" podID="69455890-6e14-45bf-bff7-2848b454c077" containerID="b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59" exitCode=0 Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.219931 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerDied","Data":"b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59"} Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.219958 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h9zs4" event={"ID":"69455890-6e14-45bf-bff7-2848b454c077","Type":"ContainerDied","Data":"039ad8729eafe0357ddcd9668504e33b60287b1c6a577a61a54c16392e1d6d41"} Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.219998 4809 scope.go:117] "RemoveContainer" containerID="b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.220032 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h9zs4" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.250821 4809 scope.go:117] "RemoveContainer" containerID="638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.291671 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h9zs4"] Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.298056 4809 scope.go:117] "RemoveContainer" containerID="9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.302593 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h9zs4"] Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.360201 4809 scope.go:117] "RemoveContainer" containerID="b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59" Sep 30 00:58:06 crc kubenswrapper[4809]: E0930 00:58:06.360891 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59\": container with ID starting with b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59 not found: ID does not exist" containerID="b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.360971 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59"} err="failed to get container status \"b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59\": rpc error: code = NotFound desc = could not find container \"b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59\": container with ID starting with b0b306662b9911241c4a504c996667519e66e1db14a110c3af5132ee0c96fc59 not found: ID does not exist" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.361021 4809 scope.go:117] "RemoveContainer" containerID="638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643" Sep 30 00:58:06 crc kubenswrapper[4809]: E0930 00:58:06.361561 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643\": container with ID starting with 638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643 not found: ID does not exist" containerID="638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.361685 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643"} err="failed to get container status \"638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643\": rpc error: code = NotFound desc = could not find container \"638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643\": container with ID starting with 638bc00b7f640e0e31077f1e91a117a39a5081577464f5ed970ad4da0e295643 not found: ID does not exist" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.361765 4809 scope.go:117] "RemoveContainer" containerID="9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb" Sep 30 00:58:06 crc kubenswrapper[4809]: E0930 00:58:06.362084 4809 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb\": container with ID starting with 9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb not found: ID does not exist" containerID="9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb" Sep 30 00:58:06 crc kubenswrapper[4809]: I0930 00:58:06.362117 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb"} err="failed to get container status \"9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb\": rpc error: code = NotFound desc = could not find container \"9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb\": container with ID starting with 9de7a0066705f27e16de73904dbb37df16ef79aa6034fe11f1a1e437e07d8feb not found: ID does not exist" Sep 30 00:58:07 crc kubenswrapper[4809]: I0930 00:58:07.713484 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69455890-6e14-45bf-bff7-2848b454c077" path="/var/lib/kubelet/pods/69455890-6e14-45bf-bff7-2848b454c077/volumes" Sep 30 00:58:08 crc kubenswrapper[4809]: I0930 00:58:08.690963 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:58:08 crc kubenswrapper[4809]: E0930 00:58:08.691472 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:58:23 crc kubenswrapper[4809]: I0930 00:58:23.691028 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:58:23 crc kubenswrapper[4809]: E0930 00:58:23.692209 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:58:37 crc kubenswrapper[4809]: I0930 00:58:37.691157 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:58:37 crc kubenswrapper[4809]: E0930 00:58:37.691903 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:58:49 crc kubenswrapper[4809]: I0930 00:58:49.699476 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:58:49 crc kubenswrapper[4809]: E0930 00:58:49.700740 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:59:03 crc kubenswrapper[4809]: I0930 00:59:03.690449 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:59:03 crc kubenswrapper[4809]: E0930 00:59:03.691149 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:59:16 crc kubenswrapper[4809]: I0930 00:59:16.691456 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:59:16 crc kubenswrapper[4809]: E0930 00:59:16.692405 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:59:29 crc kubenswrapper[4809]: I0930 00:59:29.707103 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:59:29 crc kubenswrapper[4809]: E0930 00:59:29.709029 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:59:42 crc kubenswrapper[4809]: I0930 00:59:42.691359 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:59:42 crc kubenswrapper[4809]: E0930 00:59:42.692309 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.437105 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.447353 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.456586 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jzklt"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.467347 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.478282 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.489103 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.500330 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-ds6tz"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.511400 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.520309 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-rskbg"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.528243 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-chbvd"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.535308 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-v2zzx"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.542798 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-wqsx4"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.550248 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.557971 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ndvjr"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.565315 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.573024 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.579865 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m8z2d"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.586750 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9g5l2"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.593687 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.600602 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.607457 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.614141 4809 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.621025 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.629110 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-m9vlh"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.641213 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.650344 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-57n5p"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.655666 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k2gl9"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.662192 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-swq2j"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.668397 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-jwhf6"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.675043 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-qq4xp"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.681316 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-tdgd5"] Sep 30 00:59:44 crc kubenswrapper[4809]: I0930 00:59:44.687949 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k2gl9"] Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.707986 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1469fb07-9396-4b4f-afbd-d50ced38d02d" path="/var/lib/kubelet/pods/1469fb07-9396-4b4f-afbd-d50ced38d02d/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.709521 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b62f711-5eaf-4f0c-b2aa-e29f893a1871" path="/var/lib/kubelet/pods/1b62f711-5eaf-4f0c-b2aa-e29f893a1871/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.710916 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2300c556-cca3-451c-bbf5-1f1dd17a3d41" path="/var/lib/kubelet/pods/2300c556-cca3-451c-bbf5-1f1dd17a3d41/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.712071 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cdfd6c1-789a-484b-b8bc-d225b4690da5" path="/var/lib/kubelet/pods/2cdfd6c1-789a-484b-b8bc-d225b4690da5/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.714467 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e6d97aa-36d0-45f9-ae52-b14487f4cb5d" path="/var/lib/kubelet/pods/4e6d97aa-36d0-45f9-ae52-b14487f4cb5d/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.715721 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e5f0c82-2239-41df-886e-be2d3b59bc85" path="/var/lib/kubelet/pods/5e5f0c82-2239-41df-886e-be2d3b59bc85/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.716956 4809 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="63766b50-2bbd-4532-89a3-83cc3b063a52" path="/var/lib/kubelet/pods/63766b50-2bbd-4532-89a3-83cc3b063a52/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.718773 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="740f4755-38d2-4150-85b8-e69d10d305b8" path="/var/lib/kubelet/pods/740f4755-38d2-4150-85b8-e69d10d305b8/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.719579 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cec48e7-b769-4dc4-9e71-14a237f8aab8" path="/var/lib/kubelet/pods/8cec48e7-b769-4dc4-9e71-14a237f8aab8/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.720221 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92157a7e-beb8-4789-9818-325fa359b0d0" path="/var/lib/kubelet/pods/92157a7e-beb8-4789-9818-325fa359b0d0/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.720855 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a35eaeb-456c-42d9-bd9a-9eb736876d56" path="/var/lib/kubelet/pods/9a35eaeb-456c-42d9-bd9a-9eb736876d56/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.721953 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc1b815-383d-4f16-a8db-cfa3e13a66f4" path="/var/lib/kubelet/pods/9dc1b815-383d-4f16-a8db-cfa3e13a66f4/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.722553 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a89a4bf7-7d14-4374-9de0-adf3c01c20e4" path="/var/lib/kubelet/pods/a89a4bf7-7d14-4374-9de0-adf3c01c20e4/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.723166 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a96249d1-892d-4100-b032-c805fc0f9cbb" path="/var/lib/kubelet/pods/a96249d1-892d-4100-b032-c805fc0f9cbb/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.724129 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca0c759d-ee70-41e7-b36d-0e1652aa1021" path="/var/lib/kubelet/pods/ca0c759d-ee70-41e7-b36d-0e1652aa1021/volumes" Sep 30 00:59:45 crc kubenswrapper[4809]: I0930 00:59:45.724714 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcf72aff-935b-4320-ac12-29bf554f224c" path="/var/lib/kubelet/pods/fcf72aff-935b-4320-ac12-29bf554f224c/volumes" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.302635 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl"] Sep 30 00:59:49 crc kubenswrapper[4809]: E0930 00:59:49.303617 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="extract-utilities" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.303631 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="extract-utilities" Sep 30 00:59:49 crc kubenswrapper[4809]: E0930 00:59:49.303676 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="registry-server" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.303683 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="registry-server" Sep 30 00:59:49 crc kubenswrapper[4809]: E0930 00:59:49.303709 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69455890-6e14-45bf-bff7-2848b454c077" 
containerName="extract-content" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.303718 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="extract-content" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.303968 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="69455890-6e14-45bf-bff7-2848b454c077" containerName="registry-server" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.304723 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.306598 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.307417 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.308713 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.309138 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.314472 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.316944 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl"] Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.404107 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.404219 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.404318 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.404353 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z7gr\" (UniqueName: \"kubernetes.io/projected/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-kube-api-access-5z7gr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc 
kubenswrapper[4809]: I0930 00:59:49.404380 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.506540 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.506609 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z7gr\" (UniqueName: \"kubernetes.io/projected/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-kube-api-access-5z7gr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.506654 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.506709 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.506764 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.516593 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.516659 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.528095 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.531213 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.534624 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z7gr\" (UniqueName: \"kubernetes.io/projected/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-kube-api-access-5z7gr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:49 crc kubenswrapper[4809]: I0930 00:59:49.626510 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 00:59:50 crc kubenswrapper[4809]: I0930 00:59:50.184241 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl"] Sep 30 00:59:50 crc kubenswrapper[4809]: W0930 00:59:50.189896 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26c07bd8_9cdc_4d44_82b9_d76f51b9ce27.slice/crio-834019e617ca9a12bc47de83359ffc49fdf658323bd9c45c69970db8022b8b2d WatchSource:0}: Error finding container 834019e617ca9a12bc47de83359ffc49fdf658323bd9c45c69970db8022b8b2d: Status 404 returned error can't find the container with id 834019e617ca9a12bc47de83359ffc49fdf658323bd9c45c69970db8022b8b2d Sep 30 00:59:50 crc kubenswrapper[4809]: I0930 00:59:50.559622 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" event={"ID":"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27","Type":"ContainerStarted","Data":"834019e617ca9a12bc47de83359ffc49fdf658323bd9c45c69970db8022b8b2d"} Sep 30 00:59:51 crc kubenswrapper[4809]: I0930 00:59:51.572766 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" event={"ID":"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27","Type":"ContainerStarted","Data":"fdf4a0716275c00be0f4480ab21d32295bb783a3ec1bfbfa5a42798b09cbd861"} Sep 30 00:59:51 crc kubenswrapper[4809]: I0930 00:59:51.596560 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" podStartSLOduration=2.185235216 podStartE2EDuration="2.596542493s" podCreationTimestamp="2025-09-30 00:59:49 +0000 UTC" firstStartedPulling="2025-09-30 00:59:50.191784856 +0000 UTC m=+3041.228034254" lastFinishedPulling="2025-09-30 00:59:50.603092123 +0000 UTC m=+3041.639341531" observedRunningTime="2025-09-30 00:59:51.587950478 +0000 UTC m=+3042.624199886" watchObservedRunningTime="2025-09-30 00:59:51.596542493 +0000 UTC m=+3042.632791891" Sep 30 00:59:53 crc kubenswrapper[4809]: I0930 00:59:53.695243 4809 scope.go:117] 
"RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 00:59:53 crc kubenswrapper[4809]: E0930 00:59:53.695836 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.156281 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r"] Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.158841 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.160600 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.163361 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.171312 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r"] Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.305808 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-config-volume\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.305948 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-secret-volume\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.306194 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgztt\" (UniqueName: \"kubernetes.io/projected/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-kube-api-access-zgztt\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.408265 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-secret-volume\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.408375 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgztt\" (UniqueName: 
\"kubernetes.io/projected/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-kube-api-access-zgztt\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.408452 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-config-volume\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.409402 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-config-volume\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.416615 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-secret-volume\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.438408 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgztt\" (UniqueName: \"kubernetes.io/projected/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-kube-api-access-zgztt\") pod \"collect-profiles-29319900-85j5r\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:00 crc kubenswrapper[4809]: I0930 01:00:00.480941 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:01 crc kubenswrapper[4809]: I0930 01:00:01.018122 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r"] Sep 30 01:00:01 crc kubenswrapper[4809]: I0930 01:00:01.719735 4809 generic.go:334] "Generic (PLEG): container finished" podID="5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" containerID="7dffcffae20f5546d1226bcd2fae51725deb6f04eb77fc89604bd970fb1da4b9" exitCode=0 Sep 30 01:00:01 crc kubenswrapper[4809]: I0930 01:00:01.720201 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" event={"ID":"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5","Type":"ContainerDied","Data":"7dffcffae20f5546d1226bcd2fae51725deb6f04eb77fc89604bd970fb1da4b9"} Sep 30 01:00:01 crc kubenswrapper[4809]: I0930 01:00:01.720850 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" event={"ID":"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5","Type":"ContainerStarted","Data":"a824e5f9f60b0c4bf1b5befe94f3d848b719e5324d95e9672ddecdbd97d717ef"} Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.165236 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.279280 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgztt\" (UniqueName: \"kubernetes.io/projected/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-kube-api-access-zgztt\") pod \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.279436 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-secret-volume\") pod \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.279576 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-config-volume\") pod \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\" (UID: \"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5\") " Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.280911 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-config-volume" (OuterVolumeSpecName: "config-volume") pod "5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" (UID: "5fafdae5-fcc8-4f5f-8d7b-6df239e86be5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.303041 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" (UID: "5fafdae5-fcc8-4f5f-8d7b-6df239e86be5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.324580 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-kube-api-access-zgztt" (OuterVolumeSpecName: "kube-api-access-zgztt") pod "5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" (UID: "5fafdae5-fcc8-4f5f-8d7b-6df239e86be5"). InnerVolumeSpecName "kube-api-access-zgztt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.382731 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.382772 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgztt\" (UniqueName: \"kubernetes.io/projected/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-kube-api-access-zgztt\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.382783 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.747409 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" event={"ID":"5fafdae5-fcc8-4f5f-8d7b-6df239e86be5","Type":"ContainerDied","Data":"a824e5f9f60b0c4bf1b5befe94f3d848b719e5324d95e9672ddecdbd97d717ef"} Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.747655 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a824e5f9f60b0c4bf1b5befe94f3d848b719e5324d95e9672ddecdbd97d717ef" Sep 30 01:00:03 crc kubenswrapper[4809]: I0930 01:00:03.747457 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r" Sep 30 01:00:04 crc kubenswrapper[4809]: I0930 01:00:04.256677 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6"] Sep 30 01:00:04 crc kubenswrapper[4809]: I0930 01:00:04.265560 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-n7sw6"] Sep 30 01:00:04 crc kubenswrapper[4809]: I0930 01:00:04.756413 4809 generic.go:334] "Generic (PLEG): container finished" podID="26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" containerID="fdf4a0716275c00be0f4480ab21d32295bb783a3ec1bfbfa5a42798b09cbd861" exitCode=0 Sep 30 01:00:04 crc kubenswrapper[4809]: I0930 01:00:04.756509 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" event={"ID":"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27","Type":"ContainerDied","Data":"fdf4a0716275c00be0f4480ab21d32295bb783a3ec1bfbfa5a42798b09cbd861"} Sep 30 01:00:05 crc kubenswrapper[4809]: I0930 01:00:05.707796 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d75d9dbd-f149-4d41-86d6-c11dde89be6e" path="/var/lib/kubelet/pods/d75d9dbd-f149-4d41-86d6-c11dde89be6e/volumes" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.246834 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.444839 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z7gr\" (UniqueName: \"kubernetes.io/projected/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-kube-api-access-5z7gr\") pod \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.444917 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-repo-setup-combined-ca-bundle\") pod \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.444980 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ssh-key\") pod \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.445056 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ceph\") pod \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.445187 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-inventory\") pod \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\" (UID: \"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27\") " Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.451235 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" (UID: "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.452161 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ceph" (OuterVolumeSpecName: "ceph") pod "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" (UID: "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.455045 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-kube-api-access-5z7gr" (OuterVolumeSpecName: "kube-api-access-5z7gr") pod "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" (UID: "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27"). InnerVolumeSpecName "kube-api-access-5z7gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.477914 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" (UID: "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.485024 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-inventory" (OuterVolumeSpecName: "inventory") pod "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" (UID: "26c07bd8-9cdc-4d44-82b9-d76f51b9ce27"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.549594 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z7gr\" (UniqueName: \"kubernetes.io/projected/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-kube-api-access-5z7gr\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.549669 4809 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.549689 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.549710 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.549727 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26c07bd8-9cdc-4d44-82b9-d76f51b9ce27-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.777427 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" event={"ID":"26c07bd8-9cdc-4d44-82b9-d76f51b9ce27","Type":"ContainerDied","Data":"834019e617ca9a12bc47de83359ffc49fdf658323bd9c45c69970db8022b8b2d"} Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.777464 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.777473 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="834019e617ca9a12bc47de83359ffc49fdf658323bd9c45c69970db8022b8b2d" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.857984 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489"] Sep 30 01:00:06 crc kubenswrapper[4809]: E0930 01:00:06.858832 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.860948 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 01:00:06 crc kubenswrapper[4809]: E0930 01:00:06.861093 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" containerName="collect-profiles" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.861143 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" containerName="collect-profiles" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.861619 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" containerName="collect-profiles" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.861706 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="26c07bd8-9cdc-4d44-82b9-d76f51b9ce27" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.862509 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.864454 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.864657 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.864923 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.865081 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.865090 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:00:06 crc kubenswrapper[4809]: I0930 01:00:06.870701 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489"] Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.059361 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.059507 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.059585 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.059657 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2hml\" (UniqueName: \"kubernetes.io/projected/4c8e9701-fb27-481f-8572-e7f163487a92-kube-api-access-k2hml\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.059844 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.162106 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.162610 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.162743 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2hml\" (UniqueName: \"kubernetes.io/projected/4c8e9701-fb27-481f-8572-e7f163487a92-kube-api-access-k2hml\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.162843 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.162935 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.166161 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.166784 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.168733 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.168801 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.196877 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2hml\" (UniqueName: \"kubernetes.io/projected/4c8e9701-fb27-481f-8572-e7f163487a92-kube-api-access-k2hml\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4q489\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:07 crc kubenswrapper[4809]: I0930 01:00:07.484127 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:00:08 crc kubenswrapper[4809]: I0930 01:00:08.095016 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489"] Sep 30 01:00:08 crc kubenswrapper[4809]: I0930 01:00:08.692031 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:00:08 crc kubenswrapper[4809]: E0930 01:00:08.692713 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:00:08 crc kubenswrapper[4809]: I0930 01:00:08.811209 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" event={"ID":"4c8e9701-fb27-481f-8572-e7f163487a92","Type":"ContainerStarted","Data":"23a602643ad3f67fa043879abb9d4bc66c51eee18073b4378c0c1224c4b12244"} Sep 30 01:00:09 crc kubenswrapper[4809]: I0930 01:00:09.831602 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" event={"ID":"4c8e9701-fb27-481f-8572-e7f163487a92","Type":"ContainerStarted","Data":"55d60c398575c21aeed2fa327498ac7d7191d3661492d4f14dd63594a316af8a"} Sep 30 01:00:09 crc kubenswrapper[4809]: I0930 01:00:09.857584 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" podStartSLOduration=3.266488485 podStartE2EDuration="3.857568893s" podCreationTimestamp="2025-09-30 01:00:06 +0000 UTC" firstStartedPulling="2025-09-30 01:00:08.097024198 +0000 UTC m=+3059.133273616" lastFinishedPulling="2025-09-30 01:00:08.688104556 +0000 UTC m=+3059.724354024" observedRunningTime="2025-09-30 01:00:09.850514009 +0000 UTC m=+3060.886763437" watchObservedRunningTime="2025-09-30 01:00:09.857568893 +0000 UTC m=+3060.893818301" Sep 30 01:00:15 crc kubenswrapper[4809]: I0930 01:00:15.846259 4809 scope.go:117] "RemoveContainer" containerID="18d3481267e8df2044d4d52ec730f4f1a9c45f2daa8162f645dcfc1857bfb831" Sep 30 01:00:15 crc kubenswrapper[4809]: I0930 01:00:15.891202 4809 scope.go:117] "RemoveContainer" containerID="faf71e3d365cb268d95dd97f415faf5e8f6ad18c7314fa6bb12c5f7024540c75" Sep 30 01:00:15 crc kubenswrapper[4809]: I0930 01:00:15.953455 4809 scope.go:117] "RemoveContainer" containerID="a32a0882f766b52834b1c9850e71ae9b6dd9c6ba28183f75a6d6f3f5dafe2bcd" Sep 30 
01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.029818 4809 scope.go:117] "RemoveContainer" containerID="242d74b4edafb24e91a8cf568650b46be4238da5dfceaef824bd712ca9e74a6d" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.070491 4809 scope.go:117] "RemoveContainer" containerID="b141a3642a92ae95db9d4a6e493fe12dbd4a13a55a52fc01d81cf07fe7bf6f06" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.148110 4809 scope.go:117] "RemoveContainer" containerID="4466f448d56626dba7c62709e609536817d5d00a117f2fcf32c701e0c81e0309" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.181584 4809 scope.go:117] "RemoveContainer" containerID="ea71be0e0c8811aaaf144e5c05b31203afe6efa00c7dcdf030858fb28ebbc995" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.258780 4809 scope.go:117] "RemoveContainer" containerID="fecb52858e5c57ce2303dab098f2883d803e3006fbc4b6c7e27cefafe6d44169" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.309374 4809 scope.go:117] "RemoveContainer" containerID="5bc24f5c5300e4b59c45fd3f4d2d439c80626e9b210eb608bbc2a7be694a1acf" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.363252 4809 scope.go:117] "RemoveContainer" containerID="15127d28123495dec6101b7b7d5b58f030c05076140c43a088ea442fb1f2d814" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.398960 4809 scope.go:117] "RemoveContainer" containerID="be415b91b7c0ad089c80fb11dcb8700408f762609f958d9899ab9af750be6ff0" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.462012 4809 scope.go:117] "RemoveContainer" containerID="7420898a0192d0360cf65771248df4bea6c34f22b6a815b03b528d65fbb5ecfc" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.501868 4809 scope.go:117] "RemoveContainer" containerID="0834ffc58f15979ef7c0585aabdb910499ca9f968ee4816334ef374874270d6c" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.560663 4809 scope.go:117] "RemoveContainer" containerID="3c44ea3aa91a201d9057fb4073c6d344f35e4a3701d9a409bdf256f3dc935712" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.606129 4809 scope.go:117] "RemoveContainer" containerID="5399a8680c7e0b31c920f96e51a06f322489f6c55088fa6bb3677f89b44dcade" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.642768 4809 scope.go:117] "RemoveContainer" containerID="bc7fd630d77d2aeb9260ba4e6b148b9d2cf0634d4063227e474ebf8152cf95b5" Sep 30 01:00:16 crc kubenswrapper[4809]: I0930 01:00:16.674311 4809 scope.go:117] "RemoveContainer" containerID="3629b4d139dab398dd1a828a6e9129d9166699b013e6b158fe5a2bbc4b9bb38d" Sep 30 01:00:19 crc kubenswrapper[4809]: I0930 01:00:19.701153 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:00:19 crc kubenswrapper[4809]: E0930 01:00:19.703203 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:00:33 crc kubenswrapper[4809]: I0930 01:00:33.690938 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:00:33 crc kubenswrapper[4809]: E0930 01:00:33.691697 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:00:46 crc kubenswrapper[4809]: I0930 01:00:46.691267 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:00:46 crc kubenswrapper[4809]: E0930 01:00:46.692077 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:00:59 crc kubenswrapper[4809]: I0930 01:00:59.697276 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:00:59 crc kubenswrapper[4809]: E0930 01:00:59.698036 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.168769 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319901-rfb6f"] Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.170555 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.182581 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319901-rfb6f"] Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.321960 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-config-data\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.322299 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42g9w\" (UniqueName: \"kubernetes.io/projected/095e40dd-8c29-4c2f-8089-04eaec05c406-kube-api-access-42g9w\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.322358 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-combined-ca-bundle\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.322383 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-fernet-keys\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.423952 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42g9w\" (UniqueName: \"kubernetes.io/projected/095e40dd-8c29-4c2f-8089-04eaec05c406-kube-api-access-42g9w\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.424040 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-combined-ca-bundle\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.424065 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-fernet-keys\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.424144 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-config-data\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.431528 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-fernet-keys\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.433340 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-combined-ca-bundle\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.433441 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-config-data\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.445412 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42g9w\" (UniqueName: \"kubernetes.io/projected/095e40dd-8c29-4c2f-8089-04eaec05c406-kube-api-access-42g9w\") pod \"keystone-cron-29319901-rfb6f\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:00 crc kubenswrapper[4809]: I0930 01:01:00.500155 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:01 crc kubenswrapper[4809]: I0930 01:01:01.024002 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319901-rfb6f"] Sep 30 01:01:01 crc kubenswrapper[4809]: I0930 01:01:01.389246 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-rfb6f" event={"ID":"095e40dd-8c29-4c2f-8089-04eaec05c406","Type":"ContainerStarted","Data":"e897f882bf249452f903d7685bfc62c451664fd41aa592c5ebe4ba29e6c729f7"} Sep 30 01:01:01 crc kubenswrapper[4809]: I0930 01:01:01.389679 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-rfb6f" event={"ID":"095e40dd-8c29-4c2f-8089-04eaec05c406","Type":"ContainerStarted","Data":"db21139fd3f0c43457d76a2ffa5af492fb8108ee571ff92f2bf5a4272b64e51a"} Sep 30 01:01:01 crc kubenswrapper[4809]: I0930 01:01:01.419205 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319901-rfb6f" podStartSLOduration=1.41917958 podStartE2EDuration="1.41917958s" podCreationTimestamp="2025-09-30 01:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:01:01.414348487 +0000 UTC m=+3112.450597905" watchObservedRunningTime="2025-09-30 01:01:01.41917958 +0000 UTC m=+3112.455428998" Sep 30 01:01:04 crc kubenswrapper[4809]: I0930 01:01:04.421416 4809 generic.go:334] "Generic (PLEG): container finished" podID="095e40dd-8c29-4c2f-8089-04eaec05c406" containerID="e897f882bf249452f903d7685bfc62c451664fd41aa592c5ebe4ba29e6c729f7" exitCode=0 Sep 30 01:01:04 crc kubenswrapper[4809]: I0930 01:01:04.422105 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-rfb6f" event={"ID":"095e40dd-8c29-4c2f-8089-04eaec05c406","Type":"ContainerDied","Data":"e897f882bf249452f903d7685bfc62c451664fd41aa592c5ebe4ba29e6c729f7"} Sep 30 01:01:05 crc kubenswrapper[4809]: 
I0930 01:01:05.835294 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.940313 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42g9w\" (UniqueName: \"kubernetes.io/projected/095e40dd-8c29-4c2f-8089-04eaec05c406-kube-api-access-42g9w\") pod \"095e40dd-8c29-4c2f-8089-04eaec05c406\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.940392 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-fernet-keys\") pod \"095e40dd-8c29-4c2f-8089-04eaec05c406\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.940481 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-combined-ca-bundle\") pod \"095e40dd-8c29-4c2f-8089-04eaec05c406\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.940538 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-config-data\") pod \"095e40dd-8c29-4c2f-8089-04eaec05c406\" (UID: \"095e40dd-8c29-4c2f-8089-04eaec05c406\") " Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.946709 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/095e40dd-8c29-4c2f-8089-04eaec05c406-kube-api-access-42g9w" (OuterVolumeSpecName: "kube-api-access-42g9w") pod "095e40dd-8c29-4c2f-8089-04eaec05c406" (UID: "095e40dd-8c29-4c2f-8089-04eaec05c406"). InnerVolumeSpecName "kube-api-access-42g9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.948561 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "095e40dd-8c29-4c2f-8089-04eaec05c406" (UID: "095e40dd-8c29-4c2f-8089-04eaec05c406"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.972334 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "095e40dd-8c29-4c2f-8089-04eaec05c406" (UID: "095e40dd-8c29-4c2f-8089-04eaec05c406"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:01:05 crc kubenswrapper[4809]: I0930 01:01:05.998353 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-config-data" (OuterVolumeSpecName: "config-data") pod "095e40dd-8c29-4c2f-8089-04eaec05c406" (UID: "095e40dd-8c29-4c2f-8089-04eaec05c406"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.043101 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42g9w\" (UniqueName: \"kubernetes.io/projected/095e40dd-8c29-4c2f-8089-04eaec05c406-kube-api-access-42g9w\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.043135 4809 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.043152 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.043164 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/095e40dd-8c29-4c2f-8089-04eaec05c406-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.444392 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-rfb6f" event={"ID":"095e40dd-8c29-4c2f-8089-04eaec05c406","Type":"ContainerDied","Data":"db21139fd3f0c43457d76a2ffa5af492fb8108ee571ff92f2bf5a4272b64e51a"} Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.444437 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db21139fd3f0c43457d76a2ffa5af492fb8108ee571ff92f2bf5a4272b64e51a" Sep 30 01:01:06 crc kubenswrapper[4809]: I0930 01:01:06.444448 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319901-rfb6f" Sep 30 01:01:10 crc kubenswrapper[4809]: I0930 01:01:10.690733 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:01:10 crc kubenswrapper[4809]: E0930 01:01:10.691416 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:01:21 crc kubenswrapper[4809]: I0930 01:01:21.691340 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:01:21 crc kubenswrapper[4809]: E0930 01:01:21.692897 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:01:35 crc kubenswrapper[4809]: I0930 01:01:35.691262 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:01:35 crc kubenswrapper[4809]: E0930 01:01:35.692007 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:01:50 crc kubenswrapper[4809]: I0930 01:01:50.691179 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:01:50 crc kubenswrapper[4809]: E0930 01:01:50.691920 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:02:00 crc kubenswrapper[4809]: I0930 01:02:00.021012 4809 generic.go:334] "Generic (PLEG): container finished" podID="4c8e9701-fb27-481f-8572-e7f163487a92" containerID="55d60c398575c21aeed2fa327498ac7d7191d3661492d4f14dd63594a316af8a" exitCode=0 Sep 30 01:02:00 crc kubenswrapper[4809]: I0930 01:02:00.021139 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" event={"ID":"4c8e9701-fb27-481f-8572-e7f163487a92","Type":"ContainerDied","Data":"55d60c398575c21aeed2fa327498ac7d7191d3661492d4f14dd63594a316af8a"} Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.719703 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.847909 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ceph\") pod \"4c8e9701-fb27-481f-8572-e7f163487a92\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.848020 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ssh-key\") pod \"4c8e9701-fb27-481f-8572-e7f163487a92\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.848170 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-inventory\") pod \"4c8e9701-fb27-481f-8572-e7f163487a92\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.848400 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-bootstrap-combined-ca-bundle\") pod \"4c8e9701-fb27-481f-8572-e7f163487a92\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.849076 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2hml\" (UniqueName: \"kubernetes.io/projected/4c8e9701-fb27-481f-8572-e7f163487a92-kube-api-access-k2hml\") pod \"4c8e9701-fb27-481f-8572-e7f163487a92\" (UID: \"4c8e9701-fb27-481f-8572-e7f163487a92\") " Sep 30 
01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.854328 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ceph" (OuterVolumeSpecName: "ceph") pod "4c8e9701-fb27-481f-8572-e7f163487a92" (UID: "4c8e9701-fb27-481f-8572-e7f163487a92"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.855079 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c8e9701-fb27-481f-8572-e7f163487a92-kube-api-access-k2hml" (OuterVolumeSpecName: "kube-api-access-k2hml") pod "4c8e9701-fb27-481f-8572-e7f163487a92" (UID: "4c8e9701-fb27-481f-8572-e7f163487a92"). InnerVolumeSpecName "kube-api-access-k2hml". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.855992 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4c8e9701-fb27-481f-8572-e7f163487a92" (UID: "4c8e9701-fb27-481f-8572-e7f163487a92"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.879162 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-inventory" (OuterVolumeSpecName: "inventory") pod "4c8e9701-fb27-481f-8572-e7f163487a92" (UID: "4c8e9701-fb27-481f-8572-e7f163487a92"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.881098 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4c8e9701-fb27-481f-8572-e7f163487a92" (UID: "4c8e9701-fb27-481f-8572-e7f163487a92"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.951508 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2hml\" (UniqueName: \"kubernetes.io/projected/4c8e9701-fb27-481f-8572-e7f163487a92-kube-api-access-k2hml\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.951541 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.951553 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.951564 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:01 crc kubenswrapper[4809]: I0930 01:02:01.951576 4809 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8e9701-fb27-481f-8572-e7f163487a92-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.042976 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" event={"ID":"4c8e9701-fb27-481f-8572-e7f163487a92","Type":"ContainerDied","Data":"23a602643ad3f67fa043879abb9d4bc66c51eee18073b4378c0c1224c4b12244"} Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.043031 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23a602643ad3f67fa043879abb9d4bc66c51eee18073b4378c0c1224c4b12244" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.043035 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4q489" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.140683 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7"] Sep 30 01:02:02 crc kubenswrapper[4809]: E0930 01:02:02.141135 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c8e9701-fb27-481f-8572-e7f163487a92" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.141157 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c8e9701-fb27-481f-8572-e7f163487a92" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:02 crc kubenswrapper[4809]: E0930 01:02:02.141184 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="095e40dd-8c29-4c2f-8089-04eaec05c406" containerName="keystone-cron" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.141194 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="095e40dd-8c29-4c2f-8089-04eaec05c406" containerName="keystone-cron" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.141451 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c8e9701-fb27-481f-8572-e7f163487a92" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.141486 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="095e40dd-8c29-4c2f-8089-04eaec05c406" containerName="keystone-cron" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.142283 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.144303 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.144829 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.145086 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.145247 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.145412 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.156368 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgvfx\" (UniqueName: \"kubernetes.io/projected/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-kube-api-access-wgvfx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.156471 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.156508 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.157008 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.162899 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7"] Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.257974 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.258483 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgvfx\" (UniqueName: \"kubernetes.io/projected/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-kube-api-access-wgvfx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.258515 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.258535 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.263814 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.272370 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.272581 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.275417 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgvfx\" (UniqueName: \"kubernetes.io/projected/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-kube-api-access-wgvfx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.461774 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:02 crc kubenswrapper[4809]: I0930 01:02:02.710770 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:02:02 crc kubenswrapper[4809]: E0930 01:02:02.712062 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:02:03 crc kubenswrapper[4809]: I0930 01:02:03.064694 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7"] Sep 30 01:02:03 crc kubenswrapper[4809]: W0930 01:02:03.068256 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70922d4c_72b5_4c9e_ae16_1fb2d7c15a07.slice/crio-e64734c795b86d04e18bf235a2b8826812e60ac8d19d75afed585b93e89b82e8 WatchSource:0}: Error finding container e64734c795b86d04e18bf235a2b8826812e60ac8d19d75afed585b93e89b82e8: Status 404 returned error can't find the container with id e64734c795b86d04e18bf235a2b8826812e60ac8d19d75afed585b93e89b82e8 Sep 30 01:02:03 crc kubenswrapper[4809]: I0930 01:02:03.070950 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:02:04 crc kubenswrapper[4809]: I0930 01:02:04.065596 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" event={"ID":"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07","Type":"ContainerStarted","Data":"5cda6eaa0d59f8ed6be7a778b57a2d2ed194b1c12bad0b00afeb0150099c6783"} Sep 30 01:02:04 crc kubenswrapper[4809]: I0930 01:02:04.066020 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" 
event={"ID":"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07","Type":"ContainerStarted","Data":"e64734c795b86d04e18bf235a2b8826812e60ac8d19d75afed585b93e89b82e8"} Sep 30 01:02:04 crc kubenswrapper[4809]: I0930 01:02:04.097095 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" podStartSLOduration=1.502207851 podStartE2EDuration="2.097064823s" podCreationTimestamp="2025-09-30 01:02:02 +0000 UTC" firstStartedPulling="2025-09-30 01:02:03.070730792 +0000 UTC m=+3174.106980190" lastFinishedPulling="2025-09-30 01:02:03.665587734 +0000 UTC m=+3174.701837162" observedRunningTime="2025-09-30 01:02:04.09147961 +0000 UTC m=+3175.127729048" watchObservedRunningTime="2025-09-30 01:02:04.097064823 +0000 UTC m=+3175.133314271" Sep 30 01:02:14 crc kubenswrapper[4809]: I0930 01:02:14.690715 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:02:14 crc kubenswrapper[4809]: E0930 01:02:14.691678 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:02:28 crc kubenswrapper[4809]: I0930 01:02:28.690946 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:02:28 crc kubenswrapper[4809]: E0930 01:02:28.693536 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:02:36 crc kubenswrapper[4809]: I0930 01:02:36.441485 4809 generic.go:334] "Generic (PLEG): container finished" podID="70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" containerID="5cda6eaa0d59f8ed6be7a778b57a2d2ed194b1c12bad0b00afeb0150099c6783" exitCode=0 Sep 30 01:02:36 crc kubenswrapper[4809]: I0930 01:02:36.441604 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" event={"ID":"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07","Type":"ContainerDied","Data":"5cda6eaa0d59f8ed6be7a778b57a2d2ed194b1c12bad0b00afeb0150099c6783"} Sep 30 01:02:37 crc kubenswrapper[4809]: I0930 01:02:37.989539 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.096751 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgvfx\" (UniqueName: \"kubernetes.io/projected/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-kube-api-access-wgvfx\") pod \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.097045 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ssh-key\") pod \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.097223 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-inventory\") pod \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.097400 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ceph\") pod \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\" (UID: \"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07\") " Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.103333 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-kube-api-access-wgvfx" (OuterVolumeSpecName: "kube-api-access-wgvfx") pod "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" (UID: "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07"). InnerVolumeSpecName "kube-api-access-wgvfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.104433 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ceph" (OuterVolumeSpecName: "ceph") pod "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" (UID: "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.147842 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" (UID: "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.149843 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-inventory" (OuterVolumeSpecName: "inventory") pod "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" (UID: "70922d4c-72b5-4c9e-ae16-1fb2d7c15a07"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.199905 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgvfx\" (UniqueName: \"kubernetes.io/projected/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-kube-api-access-wgvfx\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.199950 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.199967 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.199987 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70922d4c-72b5-4c9e-ae16-1fb2d7c15a07-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.470412 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" event={"ID":"70922d4c-72b5-4c9e-ae16-1fb2d7c15a07","Type":"ContainerDied","Data":"e64734c795b86d04e18bf235a2b8826812e60ac8d19d75afed585b93e89b82e8"} Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.470461 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e64734c795b86d04e18bf235a2b8826812e60ac8d19d75afed585b93e89b82e8" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.470520 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.584774 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb"] Sep 30 01:02:38 crc kubenswrapper[4809]: E0930 01:02:38.585289 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.585316 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.585606 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="70922d4c-72b5-4c9e-ae16-1fb2d7c15a07" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.586590 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.589031 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.589382 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.590171 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.591366 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.592057 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.609575 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb"] Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.708527 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.708591 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqlzs\" (UniqueName: \"kubernetes.io/projected/798a0c6b-ac60-4345-8359-e3db0a97744c-kube-api-access-pqlzs\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.708699 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.708745 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.811621 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.811770 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-pqlzs\" (UniqueName: \"kubernetes.io/projected/798a0c6b-ac60-4345-8359-e3db0a97744c-kube-api-access-pqlzs\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.811922 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.812026 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.816586 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.816621 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.816743 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.833036 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqlzs\" (UniqueName: \"kubernetes.io/projected/798a0c6b-ac60-4345-8359-e3db0a97744c-kube-api-access-pqlzs\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:38 crc kubenswrapper[4809]: I0930 01:02:38.918558 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:39 crc kubenswrapper[4809]: I0930 01:02:39.525206 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb"] Sep 30 01:02:40 crc kubenswrapper[4809]: I0930 01:02:40.494688 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" event={"ID":"798a0c6b-ac60-4345-8359-e3db0a97744c","Type":"ContainerStarted","Data":"50386a633f1fe79331d27901d39b293a3f9fe239a243b68c726db7512e45d24c"} Sep 30 01:02:40 crc kubenswrapper[4809]: I0930 01:02:40.495395 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" event={"ID":"798a0c6b-ac60-4345-8359-e3db0a97744c","Type":"ContainerStarted","Data":"095bde8431fd7cdd75f9cd00aafcbcb8696b58a9f91114b83efe251639b6d8d7"} Sep 30 01:02:40 crc kubenswrapper[4809]: I0930 01:02:40.522085 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" podStartSLOduration=2.101500514 podStartE2EDuration="2.522042494s" podCreationTimestamp="2025-09-30 01:02:38 +0000 UTC" firstStartedPulling="2025-09-30 01:02:39.527444762 +0000 UTC m=+3210.563694180" lastFinishedPulling="2025-09-30 01:02:39.947986742 +0000 UTC m=+3210.984236160" observedRunningTime="2025-09-30 01:02:40.517315744 +0000 UTC m=+3211.553565182" watchObservedRunningTime="2025-09-30 01:02:40.522042494 +0000 UTC m=+3211.558291912" Sep 30 01:02:40 crc kubenswrapper[4809]: I0930 01:02:40.692167 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:02:40 crc kubenswrapper[4809]: E0930 01:02:40.692801 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:02:46 crc kubenswrapper[4809]: I0930 01:02:46.567317 4809 generic.go:334] "Generic (PLEG): container finished" podID="798a0c6b-ac60-4345-8359-e3db0a97744c" containerID="50386a633f1fe79331d27901d39b293a3f9fe239a243b68c726db7512e45d24c" exitCode=0 Sep 30 01:02:46 crc kubenswrapper[4809]: I0930 01:02:46.567402 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" event={"ID":"798a0c6b-ac60-4345-8359-e3db0a97744c","Type":"ContainerDied","Data":"50386a633f1fe79331d27901d39b293a3f9fe239a243b68c726db7512e45d24c"} Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.044093 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.130339 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ceph\") pod \"798a0c6b-ac60-4345-8359-e3db0a97744c\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.130818 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ssh-key\") pod \"798a0c6b-ac60-4345-8359-e3db0a97744c\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.130852 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqlzs\" (UniqueName: \"kubernetes.io/projected/798a0c6b-ac60-4345-8359-e3db0a97744c-kube-api-access-pqlzs\") pod \"798a0c6b-ac60-4345-8359-e3db0a97744c\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.130881 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-inventory\") pod \"798a0c6b-ac60-4345-8359-e3db0a97744c\" (UID: \"798a0c6b-ac60-4345-8359-e3db0a97744c\") " Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.136355 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ceph" (OuterVolumeSpecName: "ceph") pod "798a0c6b-ac60-4345-8359-e3db0a97744c" (UID: "798a0c6b-ac60-4345-8359-e3db0a97744c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.139073 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/798a0c6b-ac60-4345-8359-e3db0a97744c-kube-api-access-pqlzs" (OuterVolumeSpecName: "kube-api-access-pqlzs") pod "798a0c6b-ac60-4345-8359-e3db0a97744c" (UID: "798a0c6b-ac60-4345-8359-e3db0a97744c"). InnerVolumeSpecName "kube-api-access-pqlzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.166615 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "798a0c6b-ac60-4345-8359-e3db0a97744c" (UID: "798a0c6b-ac60-4345-8359-e3db0a97744c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.167623 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-inventory" (OuterVolumeSpecName: "inventory") pod "798a0c6b-ac60-4345-8359-e3db0a97744c" (UID: "798a0c6b-ac60-4345-8359-e3db0a97744c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.232842 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.232875 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.232887 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/798a0c6b-ac60-4345-8359-e3db0a97744c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.232899 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqlzs\" (UniqueName: \"kubernetes.io/projected/798a0c6b-ac60-4345-8359-e3db0a97744c-kube-api-access-pqlzs\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.596503 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" event={"ID":"798a0c6b-ac60-4345-8359-e3db0a97744c","Type":"ContainerDied","Data":"095bde8431fd7cdd75f9cd00aafcbcb8696b58a9f91114b83efe251639b6d8d7"} Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.596541 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="095bde8431fd7cdd75f9cd00aafcbcb8696b58a9f91114b83efe251639b6d8d7" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.596555 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.677221 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw"] Sep 30 01:02:48 crc kubenswrapper[4809]: E0930 01:02:48.677791 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798a0c6b-ac60-4345-8359-e3db0a97744c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.677816 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="798a0c6b-ac60-4345-8359-e3db0a97744c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.678107 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="798a0c6b-ac60-4345-8359-e3db0a97744c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.679167 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.697767 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw"] Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.725765 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.726127 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.726284 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.726326 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.726479 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.857059 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.857251 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.857512 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbrxs\" (UniqueName: \"kubernetes.io/projected/e994ba19-e00a-4362-8866-130d7bb0bd5a-kube-api-access-qbrxs\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.857799 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.961062 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbrxs\" (UniqueName: \"kubernetes.io/projected/e994ba19-e00a-4362-8866-130d7bb0bd5a-kube-api-access-qbrxs\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.961275 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.961562 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.961715 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.966872 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.967735 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.970617 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:48 crc kubenswrapper[4809]: I0930 01:02:48.995624 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbrxs\" (UniqueName: \"kubernetes.io/projected/e994ba19-e00a-4362-8866-130d7bb0bd5a-kube-api-access-qbrxs\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-96dxw\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:49 crc kubenswrapper[4809]: I0930 01:02:49.045191 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:02:49 crc kubenswrapper[4809]: I0930 01:02:49.636688 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw"] Sep 30 01:02:50 crc kubenswrapper[4809]: I0930 01:02:50.632485 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" event={"ID":"e994ba19-e00a-4362-8866-130d7bb0bd5a","Type":"ContainerStarted","Data":"86655f41f855697d6a81b59974925e432784b200e1ad4264422821f4f8d500cb"} Sep 30 01:02:50 crc kubenswrapper[4809]: I0930 01:02:50.632950 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" event={"ID":"e994ba19-e00a-4362-8866-130d7bb0bd5a","Type":"ContainerStarted","Data":"82779f06245c4c8c71652e6a2d61bcc4b2848cf9c692fdb58e90234b59d5135b"} Sep 30 01:02:50 crc kubenswrapper[4809]: I0930 01:02:50.655100 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" podStartSLOduration=2.230289608 podStartE2EDuration="2.655077564s" podCreationTimestamp="2025-09-30 01:02:48 +0000 UTC" firstStartedPulling="2025-09-30 01:02:49.641151264 +0000 UTC m=+3220.677400692" lastFinishedPulling="2025-09-30 01:02:50.0659392 +0000 UTC m=+3221.102188648" observedRunningTime="2025-09-30 01:02:50.649305096 +0000 UTC m=+3221.685554524" watchObservedRunningTime="2025-09-30 01:02:50.655077564 +0000 UTC m=+3221.691326982" Sep 30 01:02:54 crc kubenswrapper[4809]: I0930 01:02:54.691043 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:02:54 crc kubenswrapper[4809]: E0930 01:02:54.691863 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:03:08 crc kubenswrapper[4809]: I0930 01:03:08.691709 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:03:09 crc kubenswrapper[4809]: I0930 01:03:09.851227 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"207c364356b5c57377501c5f3090f0528fd819671371266d7c3d3673523548d3"} Sep 30 01:03:41 crc kubenswrapper[4809]: I0930 01:03:41.202771 4809 generic.go:334] "Generic (PLEG): container finished" podID="e994ba19-e00a-4362-8866-130d7bb0bd5a" containerID="86655f41f855697d6a81b59974925e432784b200e1ad4264422821f4f8d500cb" exitCode=0 Sep 30 01:03:41 crc kubenswrapper[4809]: I0930 01:03:41.202889 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" event={"ID":"e994ba19-e00a-4362-8866-130d7bb0bd5a","Type":"ContainerDied","Data":"86655f41f855697d6a81b59974925e432784b200e1ad4264422821f4f8d500cb"} Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.713177 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.767490 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ceph\") pod \"e994ba19-e00a-4362-8866-130d7bb0bd5a\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.767768 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbrxs\" (UniqueName: \"kubernetes.io/projected/e994ba19-e00a-4362-8866-130d7bb0bd5a-kube-api-access-qbrxs\") pod \"e994ba19-e00a-4362-8866-130d7bb0bd5a\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.767890 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-inventory\") pod \"e994ba19-e00a-4362-8866-130d7bb0bd5a\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.767948 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ssh-key\") pod \"e994ba19-e00a-4362-8866-130d7bb0bd5a\" (UID: \"e994ba19-e00a-4362-8866-130d7bb0bd5a\") " Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.776202 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ceph" (OuterVolumeSpecName: "ceph") pod "e994ba19-e00a-4362-8866-130d7bb0bd5a" (UID: "e994ba19-e00a-4362-8866-130d7bb0bd5a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.777889 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e994ba19-e00a-4362-8866-130d7bb0bd5a-kube-api-access-qbrxs" (OuterVolumeSpecName: "kube-api-access-qbrxs") pod "e994ba19-e00a-4362-8866-130d7bb0bd5a" (UID: "e994ba19-e00a-4362-8866-130d7bb0bd5a"). InnerVolumeSpecName "kube-api-access-qbrxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.802541 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-inventory" (OuterVolumeSpecName: "inventory") pod "e994ba19-e00a-4362-8866-130d7bb0bd5a" (UID: "e994ba19-e00a-4362-8866-130d7bb0bd5a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.803993 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e994ba19-e00a-4362-8866-130d7bb0bd5a" (UID: "e994ba19-e00a-4362-8866-130d7bb0bd5a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.870792 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.870823 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.870832 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e994ba19-e00a-4362-8866-130d7bb0bd5a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:42 crc kubenswrapper[4809]: I0930 01:03:42.870840 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbrxs\" (UniqueName: \"kubernetes.io/projected/e994ba19-e00a-4362-8866-130d7bb0bd5a-kube-api-access-qbrxs\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.224718 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" event={"ID":"e994ba19-e00a-4362-8866-130d7bb0bd5a","Type":"ContainerDied","Data":"82779f06245c4c8c71652e6a2d61bcc4b2848cf9c692fdb58e90234b59d5135b"} Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.224994 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82779f06245c4c8c71652e6a2d61bcc4b2848cf9c692fdb58e90234b59d5135b" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.225124 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-96dxw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.323517 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw"] Sep 30 01:03:43 crc kubenswrapper[4809]: E0930 01:03:43.324143 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e994ba19-e00a-4362-8866-130d7bb0bd5a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.324169 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e994ba19-e00a-4362-8866-130d7bb0bd5a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.324412 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e994ba19-e00a-4362-8866-130d7bb0bd5a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.325395 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.335990 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw"] Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.351058 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.351130 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.351063 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.351360 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.351538 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.381710 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kn47\" (UniqueName: \"kubernetes.io/projected/7314abce-8cbe-45a0-b6de-9b8a03555fc6-kube-api-access-7kn47\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.381784 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.381848 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.381869 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.483880 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kn47\" (UniqueName: \"kubernetes.io/projected/7314abce-8cbe-45a0-b6de-9b8a03555fc6-kube-api-access-7kn47\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.484207 4809 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.484275 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.484301 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.488500 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.489019 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.489371 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.499387 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kn47\" (UniqueName: \"kubernetes.io/projected/7314abce-8cbe-45a0-b6de-9b8a03555fc6-kube-api-access-7kn47\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:43 crc kubenswrapper[4809]: I0930 01:03:43.681349 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:44 crc kubenswrapper[4809]: I0930 01:03:44.285004 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw"] Sep 30 01:03:45 crc kubenswrapper[4809]: I0930 01:03:45.243088 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" event={"ID":"7314abce-8cbe-45a0-b6de-9b8a03555fc6","Type":"ContainerStarted","Data":"b97b300fb90e513f06343fdf43c4f1bcc958acad322a7e45a04ba731306e49bb"} Sep 30 01:03:45 crc kubenswrapper[4809]: I0930 01:03:45.243430 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" event={"ID":"7314abce-8cbe-45a0-b6de-9b8a03555fc6","Type":"ContainerStarted","Data":"b7a43b344cab66ffb2acdf6f8014861f0cb76cdde8e3afc942efcd7a1c64cf9f"} Sep 30 01:03:45 crc kubenswrapper[4809]: I0930 01:03:45.257972 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" podStartSLOduration=1.574519767 podStartE2EDuration="2.257949782s" podCreationTimestamp="2025-09-30 01:03:43 +0000 UTC" firstStartedPulling="2025-09-30 01:03:44.291806039 +0000 UTC m=+3275.328055447" lastFinishedPulling="2025-09-30 01:03:44.975236044 +0000 UTC m=+3276.011485462" observedRunningTime="2025-09-30 01:03:45.254419055 +0000 UTC m=+3276.290668473" watchObservedRunningTime="2025-09-30 01:03:45.257949782 +0000 UTC m=+3276.294199200" Sep 30 01:03:50 crc kubenswrapper[4809]: I0930 01:03:50.295357 4809 generic.go:334] "Generic (PLEG): container finished" podID="7314abce-8cbe-45a0-b6de-9b8a03555fc6" containerID="b97b300fb90e513f06343fdf43c4f1bcc958acad322a7e45a04ba731306e49bb" exitCode=0 Sep 30 01:03:50 crc kubenswrapper[4809]: I0930 01:03:50.295442 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" event={"ID":"7314abce-8cbe-45a0-b6de-9b8a03555fc6","Type":"ContainerDied","Data":"b97b300fb90e513f06343fdf43c4f1bcc958acad322a7e45a04ba731306e49bb"} Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.808978 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.875753 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ssh-key\") pod \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.875853 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ceph\") pod \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.875967 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kn47\" (UniqueName: \"kubernetes.io/projected/7314abce-8cbe-45a0-b6de-9b8a03555fc6-kube-api-access-7kn47\") pod \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.876062 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-inventory\") pod \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\" (UID: \"7314abce-8cbe-45a0-b6de-9b8a03555fc6\") " Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.882726 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7314abce-8cbe-45a0-b6de-9b8a03555fc6-kube-api-access-7kn47" (OuterVolumeSpecName: "kube-api-access-7kn47") pod "7314abce-8cbe-45a0-b6de-9b8a03555fc6" (UID: "7314abce-8cbe-45a0-b6de-9b8a03555fc6"). InnerVolumeSpecName "kube-api-access-7kn47". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.903964 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ceph" (OuterVolumeSpecName: "ceph") pod "7314abce-8cbe-45a0-b6de-9b8a03555fc6" (UID: "7314abce-8cbe-45a0-b6de-9b8a03555fc6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.940861 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-inventory" (OuterVolumeSpecName: "inventory") pod "7314abce-8cbe-45a0-b6de-9b8a03555fc6" (UID: "7314abce-8cbe-45a0-b6de-9b8a03555fc6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.942860 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7314abce-8cbe-45a0-b6de-9b8a03555fc6" (UID: "7314abce-8cbe-45a0-b6de-9b8a03555fc6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.979771 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.979797 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.979893 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kn47\" (UniqueName: \"kubernetes.io/projected/7314abce-8cbe-45a0-b6de-9b8a03555fc6-kube-api-access-7kn47\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:51 crc kubenswrapper[4809]: I0930 01:03:51.979937 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7314abce-8cbe-45a0-b6de-9b8a03555fc6-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.319587 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" event={"ID":"7314abce-8cbe-45a0-b6de-9b8a03555fc6","Type":"ContainerDied","Data":"b7a43b344cab66ffb2acdf6f8014861f0cb76cdde8e3afc942efcd7a1c64cf9f"} Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.319638 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7a43b344cab66ffb2acdf6f8014861f0cb76cdde8e3afc942efcd7a1c64cf9f" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.319687 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.446795 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g"] Sep 30 01:03:52 crc kubenswrapper[4809]: E0930 01:03:52.447498 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7314abce-8cbe-45a0-b6de-9b8a03555fc6" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.447529 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="7314abce-8cbe-45a0-b6de-9b8a03555fc6" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.447947 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="7314abce-8cbe-45a0-b6de-9b8a03555fc6" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.449132 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.452688 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.452993 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.453224 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.453459 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.453840 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.476021 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g"] Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.591234 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.591370 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72h4x\" (UniqueName: \"kubernetes.io/projected/3de84e93-5356-4496-8a35-dc295412042a-kube-api-access-72h4x\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.591467 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.591554 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.694091 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.694227 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.694341 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.694463 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72h4x\" (UniqueName: \"kubernetes.io/projected/3de84e93-5356-4496-8a35-dc295412042a-kube-api-access-72h4x\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.700003 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.701243 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.707322 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.718972 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72h4x\" (UniqueName: \"kubernetes.io/projected/3de84e93-5356-4496-8a35-dc295412042a-kube-api-access-72h4x\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-29s9g\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:52 crc kubenswrapper[4809]: I0930 01:03:52.778217 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:03:53 crc kubenswrapper[4809]: I0930 01:03:53.349562 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g"] Sep 30 01:03:54 crc kubenswrapper[4809]: I0930 01:03:54.344079 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" event={"ID":"3de84e93-5356-4496-8a35-dc295412042a","Type":"ContainerStarted","Data":"dbeb5fced0f047871d1c2da16760db73f4ebf3b09d26c34c9ed885212c0168c0"} Sep 30 01:03:54 crc kubenswrapper[4809]: I0930 01:03:54.344619 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" event={"ID":"3de84e93-5356-4496-8a35-dc295412042a","Type":"ContainerStarted","Data":"29dd5400e81f89260f2087e6cd2e85f590555c1c513f267ee59e63915f4e7885"} Sep 30 01:03:54 crc kubenswrapper[4809]: I0930 01:03:54.378339 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" podStartSLOduration=1.9423239350000001 podStartE2EDuration="2.378306308s" podCreationTimestamp="2025-09-30 01:03:52 +0000 UTC" firstStartedPulling="2025-09-30 01:03:53.360456791 +0000 UTC m=+3284.396706219" lastFinishedPulling="2025-09-30 01:03:53.796439144 +0000 UTC m=+3284.832688592" observedRunningTime="2025-09-30 01:03:54.364523821 +0000 UTC m=+3285.400773249" watchObservedRunningTime="2025-09-30 01:03:54.378306308 +0000 UTC m=+3285.414555766" Sep 30 01:04:59 crc kubenswrapper[4809]: I0930 01:04:59.133107 4809 generic.go:334] "Generic (PLEG): container finished" podID="3de84e93-5356-4496-8a35-dc295412042a" containerID="dbeb5fced0f047871d1c2da16760db73f4ebf3b09d26c34c9ed885212c0168c0" exitCode=0 Sep 30 01:04:59 crc kubenswrapper[4809]: I0930 01:04:59.133208 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" event={"ID":"3de84e93-5356-4496-8a35-dc295412042a","Type":"ContainerDied","Data":"dbeb5fced0f047871d1c2da16760db73f4ebf3b09d26c34c9ed885212c0168c0"} Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.645294 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.734624 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ceph\") pod \"3de84e93-5356-4496-8a35-dc295412042a\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.734822 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72h4x\" (UniqueName: \"kubernetes.io/projected/3de84e93-5356-4496-8a35-dc295412042a-kube-api-access-72h4x\") pod \"3de84e93-5356-4496-8a35-dc295412042a\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.735122 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ssh-key\") pod \"3de84e93-5356-4496-8a35-dc295412042a\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.735561 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-inventory\") pod \"3de84e93-5356-4496-8a35-dc295412042a\" (UID: \"3de84e93-5356-4496-8a35-dc295412042a\") " Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.741854 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3de84e93-5356-4496-8a35-dc295412042a-kube-api-access-72h4x" (OuterVolumeSpecName: "kube-api-access-72h4x") pod "3de84e93-5356-4496-8a35-dc295412042a" (UID: "3de84e93-5356-4496-8a35-dc295412042a"). InnerVolumeSpecName "kube-api-access-72h4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.757982 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ceph" (OuterVolumeSpecName: "ceph") pod "3de84e93-5356-4496-8a35-dc295412042a" (UID: "3de84e93-5356-4496-8a35-dc295412042a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.768354 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3de84e93-5356-4496-8a35-dc295412042a" (UID: "3de84e93-5356-4496-8a35-dc295412042a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.780242 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-inventory" (OuterVolumeSpecName: "inventory") pod "3de84e93-5356-4496-8a35-dc295412042a" (UID: "3de84e93-5356-4496-8a35-dc295412042a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.840130 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72h4x\" (UniqueName: \"kubernetes.io/projected/3de84e93-5356-4496-8a35-dc295412042a-kube-api-access-72h4x\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.840164 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.840174 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:00 crc kubenswrapper[4809]: I0930 01:05:00.840186 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3de84e93-5356-4496-8a35-dc295412042a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.185029 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" event={"ID":"3de84e93-5356-4496-8a35-dc295412042a","Type":"ContainerDied","Data":"29dd5400e81f89260f2087e6cd2e85f590555c1c513f267ee59e63915f4e7885"} Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.185064 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-29s9g" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.185084 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29dd5400e81f89260f2087e6cd2e85f590555c1c513f267ee59e63915f4e7885" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.247688 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-c8q95"] Sep 30 01:05:01 crc kubenswrapper[4809]: E0930 01:05:01.248535 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3de84e93-5356-4496-8a35-dc295412042a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.248554 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3de84e93-5356-4496-8a35-dc295412042a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.248832 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3de84e93-5356-4496-8a35-dc295412042a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.251430 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.254795 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.254978 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.254910 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.254901 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.255477 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.259875 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-c8q95"] Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.349019 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmsph\" (UniqueName: \"kubernetes.io/projected/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-kube-api-access-xmsph\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.349389 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ceph\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.349543 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.349606 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.451837 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmsph\" (UniqueName: \"kubernetes.io/projected/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-kube-api-access-xmsph\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.452496 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ceph\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: 
\"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.452876 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.453160 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.457493 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ceph\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.457569 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.460670 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.485414 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmsph\" (UniqueName: \"kubernetes.io/projected/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-kube-api-access-xmsph\") pod \"ssh-known-hosts-edpm-deployment-c8q95\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:01 crc kubenswrapper[4809]: I0930 01:05:01.577015 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:02 crc kubenswrapper[4809]: I0930 01:05:02.312354 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-c8q95"] Sep 30 01:05:03 crc kubenswrapper[4809]: I0930 01:05:03.212575 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" event={"ID":"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3","Type":"ContainerStarted","Data":"e6078ea5a1e6f7ecffb9de8ef988944d57609df0dc3a7df39bf132470b188739"} Sep 30 01:05:04 crc kubenswrapper[4809]: I0930 01:05:04.227094 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" event={"ID":"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3","Type":"ContainerStarted","Data":"fee7b9c03015697e0a1fdd0060df5a2302a544ee7b82c3402db4ea604c51c2f3"} Sep 30 01:05:04 crc kubenswrapper[4809]: I0930 01:05:04.252910 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" podStartSLOduration=2.607582498 podStartE2EDuration="3.252891619s" podCreationTimestamp="2025-09-30 01:05:01 +0000 UTC" firstStartedPulling="2025-09-30 01:05:02.325417666 +0000 UTC m=+3353.361667074" lastFinishedPulling="2025-09-30 01:05:02.970726757 +0000 UTC m=+3354.006976195" observedRunningTime="2025-09-30 01:05:04.246021642 +0000 UTC m=+3355.282271060" watchObservedRunningTime="2025-09-30 01:05:04.252891619 +0000 UTC m=+3355.289141027" Sep 30 01:05:16 crc kubenswrapper[4809]: I0930 01:05:16.362335 4809 generic.go:334] "Generic (PLEG): container finished" podID="189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" containerID="fee7b9c03015697e0a1fdd0060df5a2302a544ee7b82c3402db4ea604c51c2f3" exitCode=0 Sep 30 01:05:16 crc kubenswrapper[4809]: I0930 01:05:16.362405 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" event={"ID":"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3","Type":"ContainerDied","Data":"fee7b9c03015697e0a1fdd0060df5a2302a544ee7b82c3402db4ea604c51c2f3"} Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.833825 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.944019 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ssh-key-openstack-edpm-ipam\") pod \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.944163 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ceph\") pod \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.944289 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmsph\" (UniqueName: \"kubernetes.io/projected/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-kube-api-access-xmsph\") pod \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.944414 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-inventory-0\") pod \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\" (UID: \"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3\") " Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.949615 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ceph" (OuterVolumeSpecName: "ceph") pod "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" (UID: "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.954988 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-kube-api-access-xmsph" (OuterVolumeSpecName: "kube-api-access-xmsph") pod "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" (UID: "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3"). InnerVolumeSpecName "kube-api-access-xmsph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.983582 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" (UID: "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:17 crc kubenswrapper[4809]: I0930 01:05:17.985868 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" (UID: "189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.047164 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.047203 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.047217 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmsph\" (UniqueName: \"kubernetes.io/projected/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-kube-api-access-xmsph\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.047229 4809 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.397136 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" event={"ID":"189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3","Type":"ContainerDied","Data":"e6078ea5a1e6f7ecffb9de8ef988944d57609df0dc3a7df39bf132470b188739"} Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.397535 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6078ea5a1e6f7ecffb9de8ef988944d57609df0dc3a7df39bf132470b188739" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.397254 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c8q95" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.474163 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r"] Sep 30 01:05:18 crc kubenswrapper[4809]: E0930 01:05:18.474752 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" containerName="ssh-known-hosts-edpm-deployment" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.474777 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" containerName="ssh-known-hosts-edpm-deployment" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.475030 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3" containerName="ssh-known-hosts-edpm-deployment" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.475803 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.481326 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.481427 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.481577 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.481786 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.482158 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.485329 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r"] Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.557832 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.558184 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.558386 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmpxv\" (UniqueName: \"kubernetes.io/projected/3f31de59-0674-4a21-bfff-74c55467ca17-kube-api-access-nmpxv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.558475 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.660266 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.660454 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ssh-key\") 
pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.660511 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmpxv\" (UniqueName: \"kubernetes.io/projected/3f31de59-0674-4a21-bfff-74c55467ca17-kube-api-access-nmpxv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.660557 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.664873 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.665054 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.665245 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.688613 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmpxv\" (UniqueName: \"kubernetes.io/projected/3f31de59-0674-4a21-bfff-74c55467ca17-kube-api-access-nmpxv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t5q7r\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:18 crc kubenswrapper[4809]: I0930 01:05:18.819509 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:19 crc kubenswrapper[4809]: I0930 01:05:19.517956 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r"] Sep 30 01:05:20 crc kubenswrapper[4809]: I0930 01:05:20.422533 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" event={"ID":"3f31de59-0674-4a21-bfff-74c55467ca17","Type":"ContainerStarted","Data":"2c490b32b37a90a55ca4795c15a7469521f392b200645204a293dd27843f8138"} Sep 30 01:05:20 crc kubenswrapper[4809]: I0930 01:05:20.422962 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" event={"ID":"3f31de59-0674-4a21-bfff-74c55467ca17","Type":"ContainerStarted","Data":"5f9a2212c6871315e953913771e85fea081c4f930426f1168b9451263b347b98"} Sep 30 01:05:20 crc kubenswrapper[4809]: I0930 01:05:20.456990 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" podStartSLOduration=2.013023431 podStartE2EDuration="2.456962451s" podCreationTimestamp="2025-09-30 01:05:18 +0000 UTC" firstStartedPulling="2025-09-30 01:05:19.521415637 +0000 UTC m=+3370.557665045" lastFinishedPulling="2025-09-30 01:05:19.965354657 +0000 UTC m=+3371.001604065" observedRunningTime="2025-09-30 01:05:20.444514931 +0000 UTC m=+3371.480764379" watchObservedRunningTime="2025-09-30 01:05:20.456962451 +0000 UTC m=+3371.493211899" Sep 30 01:05:25 crc kubenswrapper[4809]: I0930 01:05:25.325218 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:05:25 crc kubenswrapper[4809]: I0930 01:05:25.325633 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:05:30 crc kubenswrapper[4809]: I0930 01:05:30.565484 4809 generic.go:334] "Generic (PLEG): container finished" podID="3f31de59-0674-4a21-bfff-74c55467ca17" containerID="2c490b32b37a90a55ca4795c15a7469521f392b200645204a293dd27843f8138" exitCode=0 Sep 30 01:05:30 crc kubenswrapper[4809]: I0930 01:05:30.565611 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" event={"ID":"3f31de59-0674-4a21-bfff-74c55467ca17","Type":"ContainerDied","Data":"2c490b32b37a90a55ca4795c15a7469521f392b200645204a293dd27843f8138"} Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.113588 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.188215 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ssh-key\") pod \"3f31de59-0674-4a21-bfff-74c55467ca17\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.188280 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ceph\") pod \"3f31de59-0674-4a21-bfff-74c55467ca17\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.188345 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-inventory\") pod \"3f31de59-0674-4a21-bfff-74c55467ca17\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.188443 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmpxv\" (UniqueName: \"kubernetes.io/projected/3f31de59-0674-4a21-bfff-74c55467ca17-kube-api-access-nmpxv\") pod \"3f31de59-0674-4a21-bfff-74c55467ca17\" (UID: \"3f31de59-0674-4a21-bfff-74c55467ca17\") " Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.202987 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ceph" (OuterVolumeSpecName: "ceph") pod "3f31de59-0674-4a21-bfff-74c55467ca17" (UID: "3f31de59-0674-4a21-bfff-74c55467ca17"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.203024 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f31de59-0674-4a21-bfff-74c55467ca17-kube-api-access-nmpxv" (OuterVolumeSpecName: "kube-api-access-nmpxv") pod "3f31de59-0674-4a21-bfff-74c55467ca17" (UID: "3f31de59-0674-4a21-bfff-74c55467ca17"). InnerVolumeSpecName "kube-api-access-nmpxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.226316 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-inventory" (OuterVolumeSpecName: "inventory") pod "3f31de59-0674-4a21-bfff-74c55467ca17" (UID: "3f31de59-0674-4a21-bfff-74c55467ca17"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.237103 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f31de59-0674-4a21-bfff-74c55467ca17" (UID: "3f31de59-0674-4a21-bfff-74c55467ca17"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.291505 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmpxv\" (UniqueName: \"kubernetes.io/projected/3f31de59-0674-4a21-bfff-74c55467ca17-kube-api-access-nmpxv\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.291542 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.291556 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.291570 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f31de59-0674-4a21-bfff-74c55467ca17-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.583840 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" event={"ID":"3f31de59-0674-4a21-bfff-74c55467ca17","Type":"ContainerDied","Data":"5f9a2212c6871315e953913771e85fea081c4f930426f1168b9451263b347b98"} Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.584131 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f9a2212c6871315e953913771e85fea081c4f930426f1168b9451263b347b98" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.583964 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t5q7r" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.694400 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m"] Sep 30 01:05:32 crc kubenswrapper[4809]: E0930 01:05:32.694990 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f31de59-0674-4a21-bfff-74c55467ca17" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.695083 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f31de59-0674-4a21-bfff-74c55467ca17" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.695334 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f31de59-0674-4a21-bfff-74c55467ca17" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.696155 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.699221 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.699314 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.699496 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.699985 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.700103 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.717370 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m"] Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.801996 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.802198 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.802226 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldtjc\" (UniqueName: \"kubernetes.io/projected/deddce34-8531-4212-84b9-38ae8445dd7c-kube-api-access-ldtjc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.802335 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.904009 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldtjc\" (UniqueName: \"kubernetes.io/projected/deddce34-8531-4212-84b9-38ae8445dd7c-kube-api-access-ldtjc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.904276 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.904394 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.904689 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.909268 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.909538 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.909820 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:32 crc kubenswrapper[4809]: I0930 01:05:32.933369 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldtjc\" (UniqueName: \"kubernetes.io/projected/deddce34-8531-4212-84b9-38ae8445dd7c-kube-api-access-ldtjc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:33 crc kubenswrapper[4809]: I0930 01:05:33.016453 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:33 crc kubenswrapper[4809]: I0930 01:05:33.751857 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m"] Sep 30 01:05:34 crc kubenswrapper[4809]: I0930 01:05:34.630077 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" event={"ID":"deddce34-8531-4212-84b9-38ae8445dd7c","Type":"ContainerStarted","Data":"87e17a7808635369ec943b28e6c10bc6e7a5170781dcf349a9f45a7ef83131c3"} Sep 30 01:05:34 crc kubenswrapper[4809]: I0930 01:05:34.630451 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" event={"ID":"deddce34-8531-4212-84b9-38ae8445dd7c","Type":"ContainerStarted","Data":"0bdff5cb36a8761f365fd94d4702964e0328f6d4b755e1c9c29206288af066aa"} Sep 30 01:05:34 crc kubenswrapper[4809]: I0930 01:05:34.650115 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" podStartSLOduration=2.220111968 podStartE2EDuration="2.650096057s" podCreationTimestamp="2025-09-30 01:05:32 +0000 UTC" firstStartedPulling="2025-09-30 01:05:33.774684707 +0000 UTC m=+3384.810934115" lastFinishedPulling="2025-09-30 01:05:34.204668796 +0000 UTC m=+3385.240918204" observedRunningTime="2025-09-30 01:05:34.646877459 +0000 UTC m=+3385.683126867" watchObservedRunningTime="2025-09-30 01:05:34.650096057 +0000 UTC m=+3385.686345475" Sep 30 01:05:47 crc kubenswrapper[4809]: I0930 01:05:47.775843 4809 generic.go:334] "Generic (PLEG): container finished" podID="deddce34-8531-4212-84b9-38ae8445dd7c" containerID="87e17a7808635369ec943b28e6c10bc6e7a5170781dcf349a9f45a7ef83131c3" exitCode=0 Sep 30 01:05:47 crc kubenswrapper[4809]: I0930 01:05:47.775960 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" event={"ID":"deddce34-8531-4212-84b9-38ae8445dd7c","Type":"ContainerDied","Data":"87e17a7808635369ec943b28e6c10bc6e7a5170781dcf349a9f45a7ef83131c3"} Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.306975 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.372593 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ceph\") pod \"deddce34-8531-4212-84b9-38ae8445dd7c\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.373157 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-inventory\") pod \"deddce34-8531-4212-84b9-38ae8445dd7c\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.373258 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ssh-key\") pod \"deddce34-8531-4212-84b9-38ae8445dd7c\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.373402 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldtjc\" (UniqueName: \"kubernetes.io/projected/deddce34-8531-4212-84b9-38ae8445dd7c-kube-api-access-ldtjc\") pod \"deddce34-8531-4212-84b9-38ae8445dd7c\" (UID: \"deddce34-8531-4212-84b9-38ae8445dd7c\") " Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.382963 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deddce34-8531-4212-84b9-38ae8445dd7c-kube-api-access-ldtjc" (OuterVolumeSpecName: "kube-api-access-ldtjc") pod "deddce34-8531-4212-84b9-38ae8445dd7c" (UID: "deddce34-8531-4212-84b9-38ae8445dd7c"). InnerVolumeSpecName "kube-api-access-ldtjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.383990 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ceph" (OuterVolumeSpecName: "ceph") pod "deddce34-8531-4212-84b9-38ae8445dd7c" (UID: "deddce34-8531-4212-84b9-38ae8445dd7c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.403286 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "deddce34-8531-4212-84b9-38ae8445dd7c" (UID: "deddce34-8531-4212-84b9-38ae8445dd7c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.416712 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-inventory" (OuterVolumeSpecName: "inventory") pod "deddce34-8531-4212-84b9-38ae8445dd7c" (UID: "deddce34-8531-4212-84b9-38ae8445dd7c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.477007 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldtjc\" (UniqueName: \"kubernetes.io/projected/deddce34-8531-4212-84b9-38ae8445dd7c-kube-api-access-ldtjc\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.477173 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.477278 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.477360 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/deddce34-8531-4212-84b9-38ae8445dd7c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.794666 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" event={"ID":"deddce34-8531-4212-84b9-38ae8445dd7c","Type":"ContainerDied","Data":"0bdff5cb36a8761f365fd94d4702964e0328f6d4b755e1c9c29206288af066aa"} Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.794703 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bdff5cb36a8761f365fd94d4702964e0328f6d4b755e1c9c29206288af066aa" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.794777 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.895018 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d"] Sep 30 01:05:49 crc kubenswrapper[4809]: E0930 01:05:49.895461 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deddce34-8531-4212-84b9-38ae8445dd7c" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.895477 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="deddce34-8531-4212-84b9-38ae8445dd7c" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.895706 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="deddce34-8531-4212-84b9-38ae8445dd7c" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.896386 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.899874 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900060 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900206 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900332 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900462 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900595 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900653 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.900663 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.902937 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.903166 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.917458 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d"] Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988155 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988203 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988225 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988272 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988351 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988374 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grvtx\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-kube-api-access-grvtx\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988403 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988419 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988434 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988480 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988517 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988532 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988585 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988614 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988665 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988692 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:49 crc kubenswrapper[4809]: I0930 01:05:49.988717 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: 
I0930 01:05:50.090569 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090621 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090688 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090723 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090757 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090783 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090835 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090886 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090914 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grvtx\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-kube-api-access-grvtx\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090949 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090970 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.090990 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.091029 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.091076 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.091099 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.091131 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.091163 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.095662 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.096285 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.098267 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.098960 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.099483 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.099501 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" 
(UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.099822 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.100053 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.100147 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.100624 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.100871 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.100955 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.102984 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.109466 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.109479 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.114890 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.114964 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grvtx\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-kube-api-access-grvtx\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.215004 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.674588 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d"] Sep 30 01:05:50 crc kubenswrapper[4809]: W0930 01:05:50.679700 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd9b1a91_564f_405f_8fbf_b148dedf9948.slice/crio-09a0dad98d119dabfca12841e07c90883c4be8a71104aa12d48b1af1a4688ee5 WatchSource:0}: Error finding container 09a0dad98d119dabfca12841e07c90883c4be8a71104aa12d48b1af1a4688ee5: Status 404 returned error can't find the container with id 09a0dad98d119dabfca12841e07c90883c4be8a71104aa12d48b1af1a4688ee5 Sep 30 01:05:50 crc kubenswrapper[4809]: I0930 01:05:50.806367 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" event={"ID":"cd9b1a91-564f-405f-8fbf-b148dedf9948","Type":"ContainerStarted","Data":"09a0dad98d119dabfca12841e07c90883c4be8a71104aa12d48b1af1a4688ee5"} Sep 30 01:05:51 crc kubenswrapper[4809]: I0930 01:05:51.822717 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" event={"ID":"cd9b1a91-564f-405f-8fbf-b148dedf9948","Type":"ContainerStarted","Data":"53c83b58e393c1a78e0399fe4309460c1aa65487a775a0ad4a43a993e2c6ff63"} Sep 30 01:05:51 crc kubenswrapper[4809]: I0930 01:05:51.850465 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" podStartSLOduration=2.286905533 podStartE2EDuration="2.850433236s" podCreationTimestamp="2025-09-30 01:05:49 +0000 UTC" firstStartedPulling="2025-09-30 01:05:50.681945656 +0000 UTC m=+3401.718195064" lastFinishedPulling="2025-09-30 01:05:51.245473359 +0000 UTC m=+3402.281722767" observedRunningTime="2025-09-30 01:05:51.846157649 +0000 UTC m=+3402.882407057" watchObservedRunningTime="2025-09-30 01:05:51.850433236 +0000 UTC m=+3402.886682684" Sep 30 01:05:55 crc kubenswrapper[4809]: I0930 01:05:55.324465 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:05:55 crc kubenswrapper[4809]: I0930 01:05:55.325166 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:06:25 crc kubenswrapper[4809]: I0930 01:06:25.325077 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:06:25 crc kubenswrapper[4809]: I0930 01:06:25.325726 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:06:25 crc kubenswrapper[4809]: I0930 01:06:25.325809 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:06:25 crc kubenswrapper[4809]: I0930 01:06:25.326688 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"207c364356b5c57377501c5f3090f0528fd819671371266d7c3d3673523548d3"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:06:25 crc kubenswrapper[4809]: I0930 01:06:25.326756 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://207c364356b5c57377501c5f3090f0528fd819671371266d7c3d3673523548d3" gracePeriod=600 Sep 30 01:06:26 crc kubenswrapper[4809]: I0930 01:06:26.215517 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="207c364356b5c57377501c5f3090f0528fd819671371266d7c3d3673523548d3" exitCode=0 Sep 30 01:06:26 crc kubenswrapper[4809]: I0930 01:06:26.215620 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"207c364356b5c57377501c5f3090f0528fd819671371266d7c3d3673523548d3"} Sep 30 01:06:26 crc kubenswrapper[4809]: I0930 01:06:26.216114 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6"} Sep 30 01:06:26 crc kubenswrapper[4809]: I0930 01:06:26.216141 4809 scope.go:117] "RemoveContainer" containerID="dde6b3e1a833743cb01eb179515c448878617ae3ed79e4a7f839e3291f8481c9" Sep 30 01:06:58 crc kubenswrapper[4809]: I0930 01:06:58.556591 4809 generic.go:334] "Generic (PLEG): container finished" podID="cd9b1a91-564f-405f-8fbf-b148dedf9948" containerID="53c83b58e393c1a78e0399fe4309460c1aa65487a775a0ad4a43a993e2c6ff63" exitCode=0 Sep 30 01:06:58 crc kubenswrapper[4809]: I0930 01:06:58.556700 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" event={"ID":"cd9b1a91-564f-405f-8fbf-b148dedf9948","Type":"ContainerDied","Data":"53c83b58e393c1a78e0399fe4309460c1aa65487a775a0ad4a43a993e2c6ff63"} Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.120695 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187096 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187165 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grvtx\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-kube-api-access-grvtx\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187197 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187233 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187280 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187312 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187366 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-repo-setup-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187408 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ssh-key\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187454 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ceph\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" 
(UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187549 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-nova-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187575 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ovn-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.187620 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-neutron-metadata-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.188468 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-power-monitoring-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.188553 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-inventory\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.188595 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-bootstrap-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.188631 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-libvirt-combined-ca-bundle\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.188686 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-ovn-default-certs-0\") pod \"cd9b1a91-564f-405f-8fbf-b148dedf9948\" (UID: \"cd9b1a91-564f-405f-8fbf-b148dedf9948\") " Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.195472 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.195519 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.195669 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.198157 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.207240 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.208172 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.209011 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-kube-api-access-grvtx" (OuterVolumeSpecName: "kube-api-access-grvtx") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "kube-api-access-grvtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.209008 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). 
InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.210317 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.210975 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.211139 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-power-monitoring-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-power-monitoring-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "telemetry-power-monitoring-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.211327 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ceph" (OuterVolumeSpecName: "ceph") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.211195 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.211734 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.218021 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.247346 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-inventory" (OuterVolumeSpecName: "inventory") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.254746 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cd9b1a91-564f-405f-8fbf-b148dedf9948" (UID: "cd9b1a91-564f-405f-8fbf-b148dedf9948"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.291867 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.291910 4809 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.291925 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.291938 4809 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.291953 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-power-monitoring-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.291999 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292012 4809 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292024 4809 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292036 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292048 4809 
reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292061 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grvtx\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-kube-api-access-grvtx\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292074 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292089 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292105 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cd9b1a91-564f-405f-8fbf-b148dedf9948-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292118 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292132 4809 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.292147 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd9b1a91-564f-405f-8fbf-b148dedf9948-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.580717 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" event={"ID":"cd9b1a91-564f-405f-8fbf-b148dedf9948","Type":"ContainerDied","Data":"09a0dad98d119dabfca12841e07c90883c4be8a71104aa12d48b1af1a4688ee5"} Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.580755 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09a0dad98d119dabfca12841e07c90883c4be8a71104aa12d48b1af1a4688ee5" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.580786 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.704184 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7"] Sep 30 01:07:00 crc kubenswrapper[4809]: E0930 01:07:00.706064 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9b1a91-564f-405f-8fbf-b148dedf9948" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.706093 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9b1a91-564f-405f-8fbf-b148dedf9948" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.706373 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd9b1a91-564f-405f-8fbf-b148dedf9948" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.708269 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.711250 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.711470 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.711696 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.711828 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.716942 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.738322 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7"] Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.804232 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.804379 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.804548 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgzrz\" (UniqueName: \"kubernetes.io/projected/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-kube-api-access-wgzrz\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " 
pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.804715 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.906657 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgzrz\" (UniqueName: \"kubernetes.io/projected/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-kube-api-access-wgzrz\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.906802 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.906856 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.906905 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.910870 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.911022 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.911229 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:00 crc kubenswrapper[4809]: I0930 01:07:00.923102 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgzrz\" (UniqueName: \"kubernetes.io/projected/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-kube-api-access-wgzrz\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:01 crc kubenswrapper[4809]: I0930 01:07:01.044009 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:01 crc kubenswrapper[4809]: W0930 01:07:01.606988 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31110c5c_0cbc_4a43_a2ff_36c7fd353e5d.slice/crio-007450e0e4076563852d0f14a86db80e1bdd4695787a9a0814487b69ac903a05 WatchSource:0}: Error finding container 007450e0e4076563852d0f14a86db80e1bdd4695787a9a0814487b69ac903a05: Status 404 returned error can't find the container with id 007450e0e4076563852d0f14a86db80e1bdd4695787a9a0814487b69ac903a05 Sep 30 01:07:01 crc kubenswrapper[4809]: I0930 01:07:01.609525 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7"] Sep 30 01:07:02 crc kubenswrapper[4809]: I0930 01:07:02.600470 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" event={"ID":"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d","Type":"ContainerStarted","Data":"758f2d4722f9405977603a4442ba9e4c1c78c852f85a278ed2269bcb80823f1e"} Sep 30 01:07:02 crc kubenswrapper[4809]: I0930 01:07:02.601140 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" event={"ID":"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d","Type":"ContainerStarted","Data":"007450e0e4076563852d0f14a86db80e1bdd4695787a9a0814487b69ac903a05"} Sep 30 01:07:02 crc kubenswrapper[4809]: I0930 01:07:02.620058 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" podStartSLOduration=2.092653569 podStartE2EDuration="2.620031872s" podCreationTimestamp="2025-09-30 01:07:00 +0000 UTC" firstStartedPulling="2025-09-30 01:07:01.609456794 +0000 UTC m=+3472.645706202" lastFinishedPulling="2025-09-30 01:07:02.136835097 +0000 UTC m=+3473.173084505" observedRunningTime="2025-09-30 01:07:02.614611833 +0000 UTC m=+3473.650861291" watchObservedRunningTime="2025-09-30 01:07:02.620031872 +0000 UTC m=+3473.656281310" Sep 30 01:07:09 crc kubenswrapper[4809]: I0930 01:07:09.685821 4809 generic.go:334] "Generic (PLEG): container finished" podID="31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" containerID="758f2d4722f9405977603a4442ba9e4c1c78c852f85a278ed2269bcb80823f1e" exitCode=0 Sep 30 01:07:09 crc kubenswrapper[4809]: I0930 01:07:09.685967 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" event={"ID":"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d","Type":"ContainerDied","Data":"758f2d4722f9405977603a4442ba9e4c1c78c852f85a278ed2269bcb80823f1e"} Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.148424 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.257400 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-inventory\") pod \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.257541 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ceph\") pod \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.257579 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ssh-key\") pod \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.257627 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgzrz\" (UniqueName: \"kubernetes.io/projected/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-kube-api-access-wgzrz\") pod \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\" (UID: \"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d\") " Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.268936 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ceph" (OuterVolumeSpecName: "ceph") pod "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" (UID: "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.279756 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-kube-api-access-wgzrz" (OuterVolumeSpecName: "kube-api-access-wgzrz") pod "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" (UID: "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d"). InnerVolumeSpecName "kube-api-access-wgzrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.290676 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-inventory" (OuterVolumeSpecName: "inventory") pod "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" (UID: "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.297420 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" (UID: "31110c5c-0cbc-4a43-a2ff-36c7fd353e5d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.360880 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.360924 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.360941 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.360953 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgzrz\" (UniqueName: \"kubernetes.io/projected/31110c5c-0cbc-4a43-a2ff-36c7fd353e5d-kube-api-access-wgzrz\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.709212 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" event={"ID":"31110c5c-0cbc-4a43-a2ff-36c7fd353e5d","Type":"ContainerDied","Data":"007450e0e4076563852d0f14a86db80e1bdd4695787a9a0814487b69ac903a05"} Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.709255 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="007450e0e4076563852d0f14a86db80e1bdd4695787a9a0814487b69ac903a05" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.709324 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.798748 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj"] Sep 30 01:07:11 crc kubenswrapper[4809]: E0930 01:07:11.799255 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.799276 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.799516 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="31110c5c-0cbc-4a43-a2ff-36c7fd353e5d" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.800298 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.804183 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.804473 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.804630 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.805026 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.804668 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.807021 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.808396 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj"] Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.870588 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.870728 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.870790 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/16525fbd-5a20-46fc-a7d2-95860193d091-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.870839 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.870884 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 
01:07:11.870933 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d52nh\" (UniqueName: \"kubernetes.io/projected/16525fbd-5a20-46fc-a7d2-95860193d091-kube-api-access-d52nh\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.972728 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.972821 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/16525fbd-5a20-46fc-a7d2-95860193d091-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.972878 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.972917 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.972966 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d52nh\" (UniqueName: \"kubernetes.io/projected/16525fbd-5a20-46fc-a7d2-95860193d091-kube-api-access-d52nh\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.973030 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.973835 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/16525fbd-5a20-46fc-a7d2-95860193d091-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.979455 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.979563 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.980024 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.980291 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:11 crc kubenswrapper[4809]: I0930 01:07:11.996413 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d52nh\" (UniqueName: \"kubernetes.io/projected/16525fbd-5a20-46fc-a7d2-95860193d091-kube-api-access-d52nh\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-7nrfj\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:12 crc kubenswrapper[4809]: I0930 01:07:12.164449 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:07:12 crc kubenswrapper[4809]: W0930 01:07:12.721662 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16525fbd_5a20_46fc_a7d2_95860193d091.slice/crio-082a29bb07b619ffdd0ba4a5e7936c84d674de96974161eeb664981d16cecf2e WatchSource:0}: Error finding container 082a29bb07b619ffdd0ba4a5e7936c84d674de96974161eeb664981d16cecf2e: Status 404 returned error can't find the container with id 082a29bb07b619ffdd0ba4a5e7936c84d674de96974161eeb664981d16cecf2e Sep 30 01:07:12 crc kubenswrapper[4809]: I0930 01:07:12.724435 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:07:12 crc kubenswrapper[4809]: I0930 01:07:12.729885 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj"] Sep 30 01:07:13 crc kubenswrapper[4809]: I0930 01:07:13.733273 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" event={"ID":"16525fbd-5a20-46fc-a7d2-95860193d091","Type":"ContainerStarted","Data":"4bb4b7f1c8a25138b187c74e1b5610f61ae0e2f05545763b949eb21a48015a58"} Sep 30 01:07:13 crc kubenswrapper[4809]: I0930 01:07:13.733719 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" event={"ID":"16525fbd-5a20-46fc-a7d2-95860193d091","Type":"ContainerStarted","Data":"082a29bb07b619ffdd0ba4a5e7936c84d674de96974161eeb664981d16cecf2e"} Sep 30 01:07:13 crc kubenswrapper[4809]: I0930 01:07:13.763057 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" podStartSLOduration=2.219629955 podStartE2EDuration="2.763037127s" podCreationTimestamp="2025-09-30 01:07:11 +0000 UTC" firstStartedPulling="2025-09-30 01:07:12.724157304 +0000 UTC m=+3483.760406722" lastFinishedPulling="2025-09-30 01:07:13.267564486 +0000 UTC m=+3484.303813894" observedRunningTime="2025-09-30 01:07:13.752216751 +0000 UTC m=+3484.788466199" watchObservedRunningTime="2025-09-30 01:07:13.763037127 +0000 UTC m=+3484.799286535" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.456446 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rp4sm"] Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.461787 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.489616 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rp4sm"] Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.522290 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-catalog-content\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.522470 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-utilities\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.522516 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lbbf\" (UniqueName: \"kubernetes.io/projected/5aed2e73-a28b-40d6-a668-6adcf6786e11-kube-api-access-2lbbf\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.623830 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-catalog-content\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.623955 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-utilities\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.623990 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lbbf\" (UniqueName: \"kubernetes.io/projected/5aed2e73-a28b-40d6-a668-6adcf6786e11-kube-api-access-2lbbf\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.624564 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-utilities\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.625026 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-catalog-content\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.655059 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2lbbf\" (UniqueName: \"kubernetes.io/projected/5aed2e73-a28b-40d6-a668-6adcf6786e11-kube-api-access-2lbbf\") pod \"community-operators-rp4sm\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:25 crc kubenswrapper[4809]: I0930 01:07:25.802305 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:26 crc kubenswrapper[4809]: I0930 01:07:26.361782 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rp4sm"] Sep 30 01:07:26 crc kubenswrapper[4809]: I0930 01:07:26.902458 4809 generic.go:334] "Generic (PLEG): container finished" podID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerID="5ffd4ad54a3be171b401b59beac2d4f97f4c884dbf5ef44b58307f7fdf904c83" exitCode=0 Sep 30 01:07:26 crc kubenswrapper[4809]: I0930 01:07:26.902835 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerDied","Data":"5ffd4ad54a3be171b401b59beac2d4f97f4c884dbf5ef44b58307f7fdf904c83"} Sep 30 01:07:26 crc kubenswrapper[4809]: I0930 01:07:26.902880 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerStarted","Data":"e4c7bfcf73ccc7210e3bb1a2746b360f7e1a54cf98115ed6d1ec3528de879c02"} Sep 30 01:07:27 crc kubenswrapper[4809]: I0930 01:07:27.918089 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerStarted","Data":"7901f6d5489124dab98071cc69b7bbfaf3282053f59bff75ae32b05bffbe855e"} Sep 30 01:07:28 crc kubenswrapper[4809]: I0930 01:07:28.948864 4809 generic.go:334] "Generic (PLEG): container finished" podID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerID="7901f6d5489124dab98071cc69b7bbfaf3282053f59bff75ae32b05bffbe855e" exitCode=0 Sep 30 01:07:28 crc kubenswrapper[4809]: I0930 01:07:28.949224 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerDied","Data":"7901f6d5489124dab98071cc69b7bbfaf3282053f59bff75ae32b05bffbe855e"} Sep 30 01:07:29 crc kubenswrapper[4809]: I0930 01:07:29.964621 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerStarted","Data":"677ec8f746fdd4788ac751c8ab0a6d226353decf2980ede595e6252eaf039904"} Sep 30 01:07:29 crc kubenswrapper[4809]: I0930 01:07:29.985808 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rp4sm" podStartSLOduration=2.416368338 podStartE2EDuration="4.98577264s" podCreationTimestamp="2025-09-30 01:07:25 +0000 UTC" firstStartedPulling="2025-09-30 01:07:26.904868098 +0000 UTC m=+3497.941117546" lastFinishedPulling="2025-09-30 01:07:29.47427244 +0000 UTC m=+3500.510521848" observedRunningTime="2025-09-30 01:07:29.984016802 +0000 UTC m=+3501.020266250" watchObservedRunningTime="2025-09-30 01:07:29.98577264 +0000 UTC m=+3501.022022098" Sep 30 01:07:35 crc kubenswrapper[4809]: I0930 01:07:35.803390 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:35 crc kubenswrapper[4809]: I0930 01:07:35.804228 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:35 crc kubenswrapper[4809]: I0930 01:07:35.895524 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:36 crc kubenswrapper[4809]: I0930 01:07:36.117380 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:36 crc kubenswrapper[4809]: I0930 01:07:36.177630 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rp4sm"] Sep 30 01:07:38 crc kubenswrapper[4809]: I0930 01:07:38.078877 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rp4sm" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="registry-server" containerID="cri-o://677ec8f746fdd4788ac751c8ab0a6d226353decf2980ede595e6252eaf039904" gracePeriod=2 Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.090015 4809 generic.go:334] "Generic (PLEG): container finished" podID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerID="677ec8f746fdd4788ac751c8ab0a6d226353decf2980ede595e6252eaf039904" exitCode=0 Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.089900 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerDied","Data":"677ec8f746fdd4788ac751c8ab0a6d226353decf2980ede595e6252eaf039904"} Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.091168 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rp4sm" event={"ID":"5aed2e73-a28b-40d6-a668-6adcf6786e11","Type":"ContainerDied","Data":"e4c7bfcf73ccc7210e3bb1a2746b360f7e1a54cf98115ed6d1ec3528de879c02"} Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.091231 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4c7bfcf73ccc7210e3bb1a2746b360f7e1a54cf98115ed6d1ec3528de879c02" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.152486 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.241337 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-utilities\") pod \"5aed2e73-a28b-40d6-a668-6adcf6786e11\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.241461 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lbbf\" (UniqueName: \"kubernetes.io/projected/5aed2e73-a28b-40d6-a668-6adcf6786e11-kube-api-access-2lbbf\") pod \"5aed2e73-a28b-40d6-a668-6adcf6786e11\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.241529 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-catalog-content\") pod \"5aed2e73-a28b-40d6-a668-6adcf6786e11\" (UID: \"5aed2e73-a28b-40d6-a668-6adcf6786e11\") " Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.242352 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-utilities" (OuterVolumeSpecName: "utilities") pod "5aed2e73-a28b-40d6-a668-6adcf6786e11" (UID: "5aed2e73-a28b-40d6-a668-6adcf6786e11"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.248746 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aed2e73-a28b-40d6-a668-6adcf6786e11-kube-api-access-2lbbf" (OuterVolumeSpecName: "kube-api-access-2lbbf") pod "5aed2e73-a28b-40d6-a668-6adcf6786e11" (UID: "5aed2e73-a28b-40d6-a668-6adcf6786e11"). InnerVolumeSpecName "kube-api-access-2lbbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.289683 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5aed2e73-a28b-40d6-a668-6adcf6786e11" (UID: "5aed2e73-a28b-40d6-a668-6adcf6786e11"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.344513 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.344860 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lbbf\" (UniqueName: \"kubernetes.io/projected/5aed2e73-a28b-40d6-a668-6adcf6786e11-kube-api-access-2lbbf\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:39 crc kubenswrapper[4809]: I0930 01:07:39.344954 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aed2e73-a28b-40d6-a668-6adcf6786e11-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:07:40 crc kubenswrapper[4809]: I0930 01:07:40.102456 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rp4sm" Sep 30 01:07:40 crc kubenswrapper[4809]: I0930 01:07:40.152820 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rp4sm"] Sep 30 01:07:40 crc kubenswrapper[4809]: I0930 01:07:40.167315 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rp4sm"] Sep 30 01:07:41 crc kubenswrapper[4809]: I0930 01:07:41.715128 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" path="/var/lib/kubelet/pods/5aed2e73-a28b-40d6-a668-6adcf6786e11/volumes" Sep 30 01:08:25 crc kubenswrapper[4809]: I0930 01:08:25.325023 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:08:25 crc kubenswrapper[4809]: I0930 01:08:25.325680 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:08:40 crc kubenswrapper[4809]: I0930 01:08:40.864677 4809 generic.go:334] "Generic (PLEG): container finished" podID="16525fbd-5a20-46fc-a7d2-95860193d091" containerID="4bb4b7f1c8a25138b187c74e1b5610f61ae0e2f05545763b949eb21a48015a58" exitCode=0 Sep 30 01:08:40 crc kubenswrapper[4809]: I0930 01:08:40.864769 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" event={"ID":"16525fbd-5a20-46fc-a7d2-95860193d091","Type":"ContainerDied","Data":"4bb4b7f1c8a25138b187c74e1b5610f61ae0e2f05545763b949eb21a48015a58"} Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.367124 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.425134 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/16525fbd-5a20-46fc-a7d2-95860193d091-ovncontroller-config-0\") pod \"16525fbd-5a20-46fc-a7d2-95860193d091\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.425298 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ovn-combined-ca-bundle\") pod \"16525fbd-5a20-46fc-a7d2-95860193d091\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.425438 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ceph\") pod \"16525fbd-5a20-46fc-a7d2-95860193d091\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.425499 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-inventory\") pod \"16525fbd-5a20-46fc-a7d2-95860193d091\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.425698 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d52nh\" (UniqueName: \"kubernetes.io/projected/16525fbd-5a20-46fc-a7d2-95860193d091-kube-api-access-d52nh\") pod \"16525fbd-5a20-46fc-a7d2-95860193d091\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.425733 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ssh-key\") pod \"16525fbd-5a20-46fc-a7d2-95860193d091\" (UID: \"16525fbd-5a20-46fc-a7d2-95860193d091\") " Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.436717 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ceph" (OuterVolumeSpecName: "ceph") pod "16525fbd-5a20-46fc-a7d2-95860193d091" (UID: "16525fbd-5a20-46fc-a7d2-95860193d091"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.436873 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16525fbd-5a20-46fc-a7d2-95860193d091-kube-api-access-d52nh" (OuterVolumeSpecName: "kube-api-access-d52nh") pod "16525fbd-5a20-46fc-a7d2-95860193d091" (UID: "16525fbd-5a20-46fc-a7d2-95860193d091"). InnerVolumeSpecName "kube-api-access-d52nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.437208 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "16525fbd-5a20-46fc-a7d2-95860193d091" (UID: "16525fbd-5a20-46fc-a7d2-95860193d091"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.463577 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-inventory" (OuterVolumeSpecName: "inventory") pod "16525fbd-5a20-46fc-a7d2-95860193d091" (UID: "16525fbd-5a20-46fc-a7d2-95860193d091"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.465142 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "16525fbd-5a20-46fc-a7d2-95860193d091" (UID: "16525fbd-5a20-46fc-a7d2-95860193d091"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.467877 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16525fbd-5a20-46fc-a7d2-95860193d091-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "16525fbd-5a20-46fc-a7d2-95860193d091" (UID: "16525fbd-5a20-46fc-a7d2-95860193d091"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.528851 4809 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.528896 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.528909 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.528922 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d52nh\" (UniqueName: \"kubernetes.io/projected/16525fbd-5a20-46fc-a7d2-95860193d091-kube-api-access-d52nh\") on node \"crc\" DevicePath \"\"" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.528933 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16525fbd-5a20-46fc-a7d2-95860193d091-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.528944 4809 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/16525fbd-5a20-46fc-a7d2-95860193d091-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.887277 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" event={"ID":"16525fbd-5a20-46fc-a7d2-95860193d091","Type":"ContainerDied","Data":"082a29bb07b619ffdd0ba4a5e7936c84d674de96974161eeb664981d16cecf2e"} Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.887319 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="082a29bb07b619ffdd0ba4a5e7936c84d674de96974161eeb664981d16cecf2e" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 
01:08:42.887346 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-7nrfj" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.991664 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2"] Sep 30 01:08:42 crc kubenswrapper[4809]: E0930 01:08:42.992127 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="registry-server" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.992148 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="registry-server" Sep 30 01:08:42 crc kubenswrapper[4809]: E0930 01:08:42.992185 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16525fbd-5a20-46fc-a7d2-95860193d091" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.992194 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="16525fbd-5a20-46fc-a7d2-95860193d091" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 01:08:42 crc kubenswrapper[4809]: E0930 01:08:42.992221 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="extract-utilities" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.992230 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="extract-utilities" Sep 30 01:08:42 crc kubenswrapper[4809]: E0930 01:08:42.992256 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="extract-content" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.992263 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="extract-content" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.992470 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="16525fbd-5a20-46fc-a7d2-95860193d091" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.992485 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aed2e73-a28b-40d6-a668-6adcf6786e11" containerName="registry-server" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.993341 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.995349 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.995593 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.995831 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.996576 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.996760 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.996843 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:08:42 crc kubenswrapper[4809]: I0930 01:08:42.997565 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.008483 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2"] Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.040671 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.040748 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.040799 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.040861 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 
01:08:43.041035 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.041070 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbqvh\" (UniqueName: \"kubernetes.io/projected/6338d96e-b4cf-4390-b303-f7eb46f3e68a-kube-api-access-mbqvh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.041168 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.142839 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.142901 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.143000 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.143033 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbqvh\" (UniqueName: \"kubernetes.io/projected/6338d96e-b4cf-4390-b303-f7eb46f3e68a-kube-api-access-mbqvh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.143093 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: 
\"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.143145 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.143200 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.147272 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.147606 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.148047 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.148362 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.149207 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.149868 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.159804 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbqvh\" (UniqueName: \"kubernetes.io/projected/6338d96e-b4cf-4390-b303-f7eb46f3e68a-kube-api-access-mbqvh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.310591 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.887932 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2"] Sep 30 01:08:43 crc kubenswrapper[4809]: I0930 01:08:43.900165 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" event={"ID":"6338d96e-b4cf-4390-b303-f7eb46f3e68a","Type":"ContainerStarted","Data":"3db3eeb2e5f616cc8074a7e8c885cdc7284421cbba2e6ba1439225a964072d35"} Sep 30 01:08:45 crc kubenswrapper[4809]: I0930 01:08:45.922187 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" event={"ID":"6338d96e-b4cf-4390-b303-f7eb46f3e68a","Type":"ContainerStarted","Data":"08ec608ecdc72c4bf23792b3b435d45460bc3c47995c91d1a8545e46c722ef72"} Sep 30 01:08:45 crc kubenswrapper[4809]: I0930 01:08:45.940959 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" podStartSLOduration=3.226797593 podStartE2EDuration="3.940937099s" podCreationTimestamp="2025-09-30 01:08:42 +0000 UTC" firstStartedPulling="2025-09-30 01:08:43.893426069 +0000 UTC m=+3574.929675477" lastFinishedPulling="2025-09-30 01:08:44.607565555 +0000 UTC m=+3575.643814983" observedRunningTime="2025-09-30 01:08:45.938288577 +0000 UTC m=+3576.974537985" watchObservedRunningTime="2025-09-30 01:08:45.940937099 +0000 UTC m=+3576.977186507" Sep 30 01:08:55 crc kubenswrapper[4809]: I0930 01:08:55.325315 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:08:55 crc kubenswrapper[4809]: I0930 01:08:55.325913 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.410765 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m7n6m"] Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.413268 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.421659 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m7n6m"] Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.604740 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jpjp\" (UniqueName: \"kubernetes.io/projected/684096df-502a-401d-8fac-eaa9c2ed9337-kube-api-access-6jpjp\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.605088 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-utilities\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.605131 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-catalog-content\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.706881 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-catalog-content\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.707138 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jpjp\" (UniqueName: \"kubernetes.io/projected/684096df-502a-401d-8fac-eaa9c2ed9337-kube-api-access-6jpjp\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.707179 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-utilities\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.707423 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-catalog-content\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.707945 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-utilities\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.729684 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6jpjp\" (UniqueName: \"kubernetes.io/projected/684096df-502a-401d-8fac-eaa9c2ed9337-kube-api-access-6jpjp\") pod \"redhat-operators-m7n6m\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:02 crc kubenswrapper[4809]: I0930 01:09:02.735418 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:03 crc kubenswrapper[4809]: I0930 01:09:03.210785 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m7n6m"] Sep 30 01:09:03 crc kubenswrapper[4809]: W0930 01:09:03.217748 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod684096df_502a_401d_8fac_eaa9c2ed9337.slice/crio-2b4315c5b36ad799e5044dbb8b944eb6c7e5cce60c08c53da4561d6122171b2b WatchSource:0}: Error finding container 2b4315c5b36ad799e5044dbb8b944eb6c7e5cce60c08c53da4561d6122171b2b: Status 404 returned error can't find the container with id 2b4315c5b36ad799e5044dbb8b944eb6c7e5cce60c08c53da4561d6122171b2b Sep 30 01:09:04 crc kubenswrapper[4809]: I0930 01:09:04.137337 4809 generic.go:334] "Generic (PLEG): container finished" podID="684096df-502a-401d-8fac-eaa9c2ed9337" containerID="a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048" exitCode=0 Sep 30 01:09:04 crc kubenswrapper[4809]: I0930 01:09:04.137427 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerDied","Data":"a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048"} Sep 30 01:09:04 crc kubenswrapper[4809]: I0930 01:09:04.138887 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerStarted","Data":"2b4315c5b36ad799e5044dbb8b944eb6c7e5cce60c08c53da4561d6122171b2b"} Sep 30 01:09:05 crc kubenswrapper[4809]: I0930 01:09:05.154038 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerStarted","Data":"ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c"} Sep 30 01:09:06 crc kubenswrapper[4809]: I0930 01:09:06.176382 4809 generic.go:334] "Generic (PLEG): container finished" podID="684096df-502a-401d-8fac-eaa9c2ed9337" containerID="ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c" exitCode=0 Sep 30 01:09:06 crc kubenswrapper[4809]: I0930 01:09:06.176451 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerDied","Data":"ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c"} Sep 30 01:09:08 crc kubenswrapper[4809]: I0930 01:09:08.217257 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerStarted","Data":"67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145"} Sep 30 01:09:08 crc kubenswrapper[4809]: I0930 01:09:08.258121 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m7n6m" podStartSLOduration=3.456863559 podStartE2EDuration="6.258102627s" 
podCreationTimestamp="2025-09-30 01:09:02 +0000 UTC" firstStartedPulling="2025-09-30 01:09:04.140325235 +0000 UTC m=+3595.176574633" lastFinishedPulling="2025-09-30 01:09:06.941564253 +0000 UTC m=+3597.977813701" observedRunningTime="2025-09-30 01:09:08.254972182 +0000 UTC m=+3599.291221590" watchObservedRunningTime="2025-09-30 01:09:08.258102627 +0000 UTC m=+3599.294352035" Sep 30 01:09:12 crc kubenswrapper[4809]: I0930 01:09:12.736559 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:12 crc kubenswrapper[4809]: I0930 01:09:12.737321 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:12 crc kubenswrapper[4809]: I0930 01:09:12.795512 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:13 crc kubenswrapper[4809]: I0930 01:09:13.322363 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:13 crc kubenswrapper[4809]: I0930 01:09:13.368958 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m7n6m"] Sep 30 01:09:15 crc kubenswrapper[4809]: I0930 01:09:15.291816 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m7n6m" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="registry-server" containerID="cri-o://67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145" gracePeriod=2 Sep 30 01:09:15 crc kubenswrapper[4809]: I0930 01:09:15.891462 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.038339 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-utilities\") pod \"684096df-502a-401d-8fac-eaa9c2ed9337\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.038573 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jpjp\" (UniqueName: \"kubernetes.io/projected/684096df-502a-401d-8fac-eaa9c2ed9337-kube-api-access-6jpjp\") pod \"684096df-502a-401d-8fac-eaa9c2ed9337\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.038605 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-catalog-content\") pod \"684096df-502a-401d-8fac-eaa9c2ed9337\" (UID: \"684096df-502a-401d-8fac-eaa9c2ed9337\") " Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.038898 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-utilities" (OuterVolumeSpecName: "utilities") pod "684096df-502a-401d-8fac-eaa9c2ed9337" (UID: "684096df-502a-401d-8fac-eaa9c2ed9337"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.039381 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.044073 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/684096df-502a-401d-8fac-eaa9c2ed9337-kube-api-access-6jpjp" (OuterVolumeSpecName: "kube-api-access-6jpjp") pod "684096df-502a-401d-8fac-eaa9c2ed9337" (UID: "684096df-502a-401d-8fac-eaa9c2ed9337"). InnerVolumeSpecName "kube-api-access-6jpjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.119200 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "684096df-502a-401d-8fac-eaa9c2ed9337" (UID: "684096df-502a-401d-8fac-eaa9c2ed9337"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.140688 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jpjp\" (UniqueName: \"kubernetes.io/projected/684096df-502a-401d-8fac-eaa9c2ed9337-kube-api-access-6jpjp\") on node \"crc\" DevicePath \"\"" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.140723 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684096df-502a-401d-8fac-eaa9c2ed9337-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.305795 4809 generic.go:334] "Generic (PLEG): container finished" podID="684096df-502a-401d-8fac-eaa9c2ed9337" containerID="67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145" exitCode=0 Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.305836 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerDied","Data":"67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145"} Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.305862 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7n6m" event={"ID":"684096df-502a-401d-8fac-eaa9c2ed9337","Type":"ContainerDied","Data":"2b4315c5b36ad799e5044dbb8b944eb6c7e5cce60c08c53da4561d6122171b2b"} Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.305882 4809 scope.go:117] "RemoveContainer" containerID="67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.306016 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7n6m" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.334999 4809 scope.go:117] "RemoveContainer" containerID="ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.345365 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m7n6m"] Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.360419 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m7n6m"] Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.370356 4809 scope.go:117] "RemoveContainer" containerID="a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.413129 4809 scope.go:117] "RemoveContainer" containerID="67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145" Sep 30 01:09:16 crc kubenswrapper[4809]: E0930 01:09:16.413612 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145\": container with ID starting with 67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145 not found: ID does not exist" containerID="67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.413669 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145"} err="failed to get container status \"67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145\": rpc error: code = NotFound desc = could not find container \"67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145\": container with ID starting with 67e0a394a7da1771abd43a5fd8119c545befe06389d8edda524f6b9532317145 not found: ID does not exist" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.413699 4809 scope.go:117] "RemoveContainer" containerID="ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c" Sep 30 01:09:16 crc kubenswrapper[4809]: E0930 01:09:16.414250 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c\": container with ID starting with ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c not found: ID does not exist" containerID="ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.414291 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c"} err="failed to get container status \"ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c\": rpc error: code = NotFound desc = could not find container \"ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c\": container with ID starting with ce7d40c698fd9e817e0f1594af45e3dfc96dfbcbd85afda3ae42aa15d4ba3e3c not found: ID does not exist" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.414318 4809 scope.go:117] "RemoveContainer" containerID="a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048" Sep 30 01:09:16 crc kubenswrapper[4809]: E0930 01:09:16.415030 4809 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048\": container with ID starting with a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048 not found: ID does not exist" containerID="a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048" Sep 30 01:09:16 crc kubenswrapper[4809]: I0930 01:09:16.415063 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048"} err="failed to get container status \"a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048\": rpc error: code = NotFound desc = could not find container \"a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048\": container with ID starting with a7067852b3574bf40ed831bbf7b71755aef120a46b8a9ed4e271bcb605beb048 not found: ID does not exist" Sep 30 01:09:17 crc kubenswrapper[4809]: I0930 01:09:17.708117 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" path="/var/lib/kubelet/pods/684096df-502a-401d-8fac-eaa9c2ed9337/volumes" Sep 30 01:09:25 crc kubenswrapper[4809]: I0930 01:09:25.324436 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:09:25 crc kubenswrapper[4809]: I0930 01:09:25.325030 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:09:25 crc kubenswrapper[4809]: I0930 01:09:25.325080 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:09:25 crc kubenswrapper[4809]: I0930 01:09:25.325931 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:09:25 crc kubenswrapper[4809]: I0930 01:09:25.325984 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" gracePeriod=600 Sep 30 01:09:25 crc kubenswrapper[4809]: E0930 01:09:25.451677 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:09:26 crc kubenswrapper[4809]: I0930 01:09:26.405852 4809 generic.go:334] "Generic 
(PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" exitCode=0 Sep 30 01:09:26 crc kubenswrapper[4809]: I0930 01:09:26.405914 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6"} Sep 30 01:09:26 crc kubenswrapper[4809]: I0930 01:09:26.406255 4809 scope.go:117] "RemoveContainer" containerID="207c364356b5c57377501c5f3090f0528fd819671371266d7c3d3673523548d3" Sep 30 01:09:26 crc kubenswrapper[4809]: I0930 01:09:26.407238 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:09:26 crc kubenswrapper[4809]: E0930 01:09:26.407784 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:09:38 crc kubenswrapper[4809]: I0930 01:09:38.691783 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:09:38 crc kubenswrapper[4809]: E0930 01:09:38.692972 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:09:50 crc kubenswrapper[4809]: I0930 01:09:50.691848 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:09:50 crc kubenswrapper[4809]: E0930 01:09:50.692804 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.131246 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wwlbq"] Sep 30 01:09:55 crc kubenswrapper[4809]: E0930 01:09:55.132599 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="registry-server" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.132622 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="registry-server" Sep 30 01:09:55 crc kubenswrapper[4809]: E0930 01:09:55.132712 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="extract-content" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.132724 4809 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="extract-content" Sep 30 01:09:55 crc kubenswrapper[4809]: E0930 01:09:55.132746 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="extract-utilities" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.132757 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="extract-utilities" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.133106 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="684096df-502a-401d-8fac-eaa9c2ed9337" containerName="registry-server" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.136167 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.172984 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wwlbq"] Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.207521 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prwqr\" (UniqueName: \"kubernetes.io/projected/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-kube-api-access-prwqr\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.207655 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-catalog-content\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.207748 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-utilities\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.309434 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-utilities\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.309617 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prwqr\" (UniqueName: \"kubernetes.io/projected/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-kube-api-access-prwqr\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.309721 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-catalog-content\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 
01:09:55.310000 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-utilities\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.310253 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-catalog-content\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.327586 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prwqr\" (UniqueName: \"kubernetes.io/projected/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-kube-api-access-prwqr\") pod \"redhat-marketplace-wwlbq\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.476275 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:09:55 crc kubenswrapper[4809]: I0930 01:09:55.972842 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wwlbq"] Sep 30 01:09:56 crc kubenswrapper[4809]: I0930 01:09:56.782723 4809 generic.go:334] "Generic (PLEG): container finished" podID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerID="2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904" exitCode=0 Sep 30 01:09:56 crc kubenswrapper[4809]: I0930 01:09:56.782792 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wwlbq" event={"ID":"1adb980e-f14c-48c1-9e1c-4dbee8bb5516","Type":"ContainerDied","Data":"2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904"} Sep 30 01:09:56 crc kubenswrapper[4809]: I0930 01:09:56.783046 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wwlbq" event={"ID":"1adb980e-f14c-48c1-9e1c-4dbee8bb5516","Type":"ContainerStarted","Data":"2994f376a8d38ed22208e084598cdf3c54b53ebb8021f25f3c517aa0a088a5da"} Sep 30 01:09:58 crc kubenswrapper[4809]: I0930 01:09:58.812244 4809 generic.go:334] "Generic (PLEG): container finished" podID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerID="70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3" exitCode=0 Sep 30 01:09:58 crc kubenswrapper[4809]: I0930 01:09:58.812297 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wwlbq" event={"ID":"1adb980e-f14c-48c1-9e1c-4dbee8bb5516","Type":"ContainerDied","Data":"70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3"} Sep 30 01:09:59 crc kubenswrapper[4809]: I0930 01:09:59.826144 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wwlbq" event={"ID":"1adb980e-f14c-48c1-9e1c-4dbee8bb5516","Type":"ContainerStarted","Data":"712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7"} Sep 30 01:09:59 crc kubenswrapper[4809]: I0930 01:09:59.853104 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wwlbq" podStartSLOduration=2.4048017760000002 podStartE2EDuration="4.853082979s" 
podCreationTimestamp="2025-09-30 01:09:55 +0000 UTC" firstStartedPulling="2025-09-30 01:09:56.785134596 +0000 UTC m=+3647.821384004" lastFinishedPulling="2025-09-30 01:09:59.233415799 +0000 UTC m=+3650.269665207" observedRunningTime="2025-09-30 01:09:59.849100081 +0000 UTC m=+3650.885349499" watchObservedRunningTime="2025-09-30 01:09:59.853082979 +0000 UTC m=+3650.889332387" Sep 30 01:10:01 crc kubenswrapper[4809]: I0930 01:10:01.923386 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4ns5q"] Sep 30 01:10:01 crc kubenswrapper[4809]: I0930 01:10:01.926368 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:01 crc kubenswrapper[4809]: I0930 01:10:01.942120 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4ns5q"] Sep 30 01:10:01 crc kubenswrapper[4809]: I0930 01:10:01.986112 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-utilities\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:01 crc kubenswrapper[4809]: I0930 01:10:01.986318 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq2k9\" (UniqueName: \"kubernetes.io/projected/520ac706-b33e-4b65-a9cb-9686a3716960-kube-api-access-mq2k9\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:01 crc kubenswrapper[4809]: I0930 01:10:01.986400 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-catalog-content\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.087737 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq2k9\" (UniqueName: \"kubernetes.io/projected/520ac706-b33e-4b65-a9cb-9686a3716960-kube-api-access-mq2k9\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.087810 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-catalog-content\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.087858 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-utilities\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.088295 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-utilities\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.088369 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-catalog-content\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.112733 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq2k9\" (UniqueName: \"kubernetes.io/projected/520ac706-b33e-4b65-a9cb-9686a3716960-kube-api-access-mq2k9\") pod \"certified-operators-4ns5q\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.244909 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.807401 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4ns5q"] Sep 30 01:10:02 crc kubenswrapper[4809]: I0930 01:10:02.860074 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerStarted","Data":"fd129457b7377cb05387d9bb820d09901a0c37fdfd6cd826afdd3aa4fbe1ecca"} Sep 30 01:10:03 crc kubenswrapper[4809]: I0930 01:10:03.871234 4809 generic.go:334] "Generic (PLEG): container finished" podID="520ac706-b33e-4b65-a9cb-9686a3716960" containerID="638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0" exitCode=0 Sep 30 01:10:03 crc kubenswrapper[4809]: I0930 01:10:03.871484 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerDied","Data":"638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0"} Sep 30 01:10:04 crc kubenswrapper[4809]: I0930 01:10:04.885472 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerStarted","Data":"218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789"} Sep 30 01:10:05 crc kubenswrapper[4809]: I0930 01:10:05.477330 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:10:05 crc kubenswrapper[4809]: I0930 01:10:05.477378 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:10:05 crc kubenswrapper[4809]: I0930 01:10:05.525056 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:10:05 crc kubenswrapper[4809]: I0930 01:10:05.691853 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:10:05 crc kubenswrapper[4809]: E0930 01:10:05.692207 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:10:05 crc kubenswrapper[4809]: I0930 01:10:05.896679 4809 generic.go:334] "Generic (PLEG): container finished" podID="520ac706-b33e-4b65-a9cb-9686a3716960" containerID="218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789" exitCode=0 Sep 30 01:10:05 crc kubenswrapper[4809]: I0930 01:10:05.897793 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerDied","Data":"218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789"} Sep 30 01:10:06 crc kubenswrapper[4809]: I0930 01:10:06.005827 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:10:06 crc kubenswrapper[4809]: I0930 01:10:06.913038 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerStarted","Data":"81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7"} Sep 30 01:10:06 crc kubenswrapper[4809]: I0930 01:10:06.914567 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wwlbq"] Sep 30 01:10:06 crc kubenswrapper[4809]: I0930 01:10:06.938675 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4ns5q" podStartSLOduration=3.486353664 podStartE2EDuration="5.938657896s" podCreationTimestamp="2025-09-30 01:10:01 +0000 UTC" firstStartedPulling="2025-09-30 01:10:03.873574981 +0000 UTC m=+3654.909824389" lastFinishedPulling="2025-09-30 01:10:06.325879203 +0000 UTC m=+3657.362128621" observedRunningTime="2025-09-30 01:10:06.934304116 +0000 UTC m=+3657.970553524" watchObservedRunningTime="2025-09-30 01:10:06.938657896 +0000 UTC m=+3657.974907304" Sep 30 01:10:07 crc kubenswrapper[4809]: I0930 01:10:07.923435 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wwlbq" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="registry-server" containerID="cri-o://712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7" gracePeriod=2 Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.504968 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.627657 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-catalog-content\") pod \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.627808 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prwqr\" (UniqueName: \"kubernetes.io/projected/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-kube-api-access-prwqr\") pod \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.627836 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-utilities\") pod \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\" (UID: \"1adb980e-f14c-48c1-9e1c-4dbee8bb5516\") " Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.628916 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-utilities" (OuterVolumeSpecName: "utilities") pod "1adb980e-f14c-48c1-9e1c-4dbee8bb5516" (UID: "1adb980e-f14c-48c1-9e1c-4dbee8bb5516"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.634161 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-kube-api-access-prwqr" (OuterVolumeSpecName: "kube-api-access-prwqr") pod "1adb980e-f14c-48c1-9e1c-4dbee8bb5516" (UID: "1adb980e-f14c-48c1-9e1c-4dbee8bb5516"). InnerVolumeSpecName "kube-api-access-prwqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.654497 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1adb980e-f14c-48c1-9e1c-4dbee8bb5516" (UID: "1adb980e-f14c-48c1-9e1c-4dbee8bb5516"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.729856 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.729884 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prwqr\" (UniqueName: \"kubernetes.io/projected/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-kube-api-access-prwqr\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.729896 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1adb980e-f14c-48c1-9e1c-4dbee8bb5516-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.939992 4809 generic.go:334] "Generic (PLEG): container finished" podID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerID="712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7" exitCode=0 Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.940031 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wwlbq" event={"ID":"1adb980e-f14c-48c1-9e1c-4dbee8bb5516","Type":"ContainerDied","Data":"712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7"} Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.940072 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wwlbq" event={"ID":"1adb980e-f14c-48c1-9e1c-4dbee8bb5516","Type":"ContainerDied","Data":"2994f376a8d38ed22208e084598cdf3c54b53ebb8021f25f3c517aa0a088a5da"} Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.940094 4809 scope.go:117] "RemoveContainer" containerID="712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.940143 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wwlbq" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.966419 4809 scope.go:117] "RemoveContainer" containerID="70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3" Sep 30 01:10:08 crc kubenswrapper[4809]: I0930 01:10:08.998346 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wwlbq"] Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.004340 4809 scope.go:117] "RemoveContainer" containerID="2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.024705 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wwlbq"] Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.084700 4809 scope.go:117] "RemoveContainer" containerID="712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7" Sep 30 01:10:09 crc kubenswrapper[4809]: E0930 01:10:09.085423 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7\": container with ID starting with 712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7 not found: ID does not exist" containerID="712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.085461 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7"} err="failed to get container status \"712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7\": rpc error: code = NotFound desc = could not find container \"712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7\": container with ID starting with 712548eee355a1d997a9cf03259d1324270168c1c7601e87331461d7582aadf7 not found: ID does not exist" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.085487 4809 scope.go:117] "RemoveContainer" containerID="70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3" Sep 30 01:10:09 crc kubenswrapper[4809]: E0930 01:10:09.085712 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3\": container with ID starting with 70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3 not found: ID does not exist" containerID="70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.085743 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3"} err="failed to get container status \"70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3\": rpc error: code = NotFound desc = could not find container \"70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3\": container with ID starting with 70f09f88648f36a1780bf10e95f56dac5fc7795ae35f049e884f4747565843a3 not found: ID does not exist" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.085757 4809 scope.go:117] "RemoveContainer" containerID="2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904" Sep 30 01:10:09 crc kubenswrapper[4809]: E0930 01:10:09.086055 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904\": container with ID starting with 2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904 not found: ID does not exist" containerID="2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.086084 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904"} err="failed to get container status \"2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904\": rpc error: code = NotFound desc = could not find container \"2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904\": container with ID starting with 2b55000a8f56b64f7629673c3eb1a370f2b4dd9630d4179421f5fa03ffb51904 not found: ID does not exist" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.708953 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" path="/var/lib/kubelet/pods/1adb980e-f14c-48c1-9e1c-4dbee8bb5516/volumes" Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.954359 4809 generic.go:334] "Generic (PLEG): container finished" podID="6338d96e-b4cf-4390-b303-f7eb46f3e68a" containerID="08ec608ecdc72c4bf23792b3b435d45460bc3c47995c91d1a8545e46c722ef72" exitCode=0 Sep 30 01:10:09 crc kubenswrapper[4809]: I0930 01:10:09.954453 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" event={"ID":"6338d96e-b4cf-4390-b303-f7eb46f3e68a","Type":"ContainerDied","Data":"08ec608ecdc72c4bf23792b3b435d45460bc3c47995c91d1a8545e46c722ef72"} Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.643686 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.696797 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ssh-key\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.696883 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-nova-metadata-neutron-config-0\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.696969 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-metadata-combined-ca-bundle\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.697041 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbqvh\" (UniqueName: \"kubernetes.io/projected/6338d96e-b4cf-4390-b303-f7eb46f3e68a-kube-api-access-mbqvh\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.697075 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-inventory\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.697240 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.697257 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ceph\") pod \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\" (UID: \"6338d96e-b4cf-4390-b303-f7eb46f3e68a\") " Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.703717 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ceph" (OuterVolumeSpecName: "ceph") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.705066 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6338d96e-b4cf-4390-b303-f7eb46f3e68a-kube-api-access-mbqvh" (OuterVolumeSpecName: "kube-api-access-mbqvh") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "kube-api-access-mbqvh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.711950 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.734701 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.735727 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.750779 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.769344 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-inventory" (OuterVolumeSpecName: "inventory") pod "6338d96e-b4cf-4390-b303-f7eb46f3e68a" (UID: "6338d96e-b4cf-4390-b303-f7eb46f3e68a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800057 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800272 4809 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800334 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800389 4809 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800453 4809 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800508 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbqvh\" (UniqueName: \"kubernetes.io/projected/6338d96e-b4cf-4390-b303-f7eb46f3e68a-kube-api-access-mbqvh\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.800560 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6338d96e-b4cf-4390-b303-f7eb46f3e68a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.978836 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" event={"ID":"6338d96e-b4cf-4390-b303-f7eb46f3e68a","Type":"ContainerDied","Data":"3db3eeb2e5f616cc8074a7e8c885cdc7284421cbba2e6ba1439225a964072d35"} Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.978874 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3db3eeb2e5f616cc8074a7e8c885cdc7284421cbba2e6ba1439225a964072d35" Sep 30 01:10:11 crc kubenswrapper[4809]: I0930 01:10:11.978926 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.124162 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw"] Sep 30 01:10:12 crc kubenswrapper[4809]: E0930 01:10:12.124794 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6338d96e-b4cf-4390-b303-f7eb46f3e68a" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.124817 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6338d96e-b4cf-4390-b303-f7eb46f3e68a" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 01:10:12 crc kubenswrapper[4809]: E0930 01:10:12.124839 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="extract-content" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.124847 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="extract-content" Sep 30 01:10:12 crc kubenswrapper[4809]: E0930 01:10:12.124860 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="extract-utilities" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.124868 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="extract-utilities" Sep 30 01:10:12 crc kubenswrapper[4809]: E0930 01:10:12.124898 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="registry-server" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.124906 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="registry-server" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.125157 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1adb980e-f14c-48c1-9e1c-4dbee8bb5516" containerName="registry-server" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.125200 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6338d96e-b4cf-4390-b303-f7eb46f3e68a" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.126078 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.129063 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.129125 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.129255 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.129314 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.129415 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.129524 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.135227 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw"] Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.214482 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cz7d\" (UniqueName: \"kubernetes.io/projected/4886a4c8-eb31-4729-9364-86652f1284c8-kube-api-access-6cz7d\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.214537 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.214632 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.214794 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.215057 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " 
pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.215140 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.245900 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.245953 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.292825 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.317612 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.317788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cz7d\" (UniqueName: \"kubernetes.io/projected/4886a4c8-eb31-4729-9364-86652f1284c8-kube-api-access-6cz7d\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.317845 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.317918 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.317986 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.318023 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: 
\"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.321328 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.321695 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.322057 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.322210 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.323546 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.335463 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cz7d\" (UniqueName: \"kubernetes.io/projected/4886a4c8-eb31-4729-9364-86652f1284c8-kube-api-access-6cz7d\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mgthw\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:12 crc kubenswrapper[4809]: I0930 01:10:12.450325 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:10:13 crc kubenswrapper[4809]: I0930 01:10:13.005738 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw"] Sep 30 01:10:13 crc kubenswrapper[4809]: W0930 01:10:13.016848 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4886a4c8_eb31_4729_9364_86652f1284c8.slice/crio-ec8e622cc8c248bfb06d98e29c58bc8537ae3f2eb29167dda94b1b688da0d004 WatchSource:0}: Error finding container ec8e622cc8c248bfb06d98e29c58bc8537ae3f2eb29167dda94b1b688da0d004: Status 404 returned error can't find the container with id ec8e622cc8c248bfb06d98e29c58bc8537ae3f2eb29167dda94b1b688da0d004 Sep 30 01:10:13 crc kubenswrapper[4809]: I0930 01:10:13.040363 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:13 crc kubenswrapper[4809]: I0930 01:10:13.526589 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4ns5q"] Sep 30 01:10:14 crc kubenswrapper[4809]: I0930 01:10:14.000378 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" event={"ID":"4886a4c8-eb31-4729-9364-86652f1284c8","Type":"ContainerStarted","Data":"75090b18eafd0e674f5b28af742f364745ff097799279d0a90b5596a995f5b51"} Sep 30 01:10:14 crc kubenswrapper[4809]: I0930 01:10:14.000448 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" event={"ID":"4886a4c8-eb31-4729-9364-86652f1284c8","Type":"ContainerStarted","Data":"ec8e622cc8c248bfb06d98e29c58bc8537ae3f2eb29167dda94b1b688da0d004"} Sep 30 01:10:14 crc kubenswrapper[4809]: I0930 01:10:14.025247 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" podStartSLOduration=1.57642315 podStartE2EDuration="2.025228758s" podCreationTimestamp="2025-09-30 01:10:12 +0000 UTC" firstStartedPulling="2025-09-30 01:10:13.019500977 +0000 UTC m=+3664.055750385" lastFinishedPulling="2025-09-30 01:10:13.468306555 +0000 UTC m=+3664.504555993" observedRunningTime="2025-09-30 01:10:14.020361616 +0000 UTC m=+3665.056611034" watchObservedRunningTime="2025-09-30 01:10:14.025228758 +0000 UTC m=+3665.061478166" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.012900 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4ns5q" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="registry-server" containerID="cri-o://81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7" gracePeriod=2 Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.546750 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.588705 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-utilities\") pod \"520ac706-b33e-4b65-a9cb-9686a3716960\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.588812 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq2k9\" (UniqueName: \"kubernetes.io/projected/520ac706-b33e-4b65-a9cb-9686a3716960-kube-api-access-mq2k9\") pod \"520ac706-b33e-4b65-a9cb-9686a3716960\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.589023 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-catalog-content\") pod \"520ac706-b33e-4b65-a9cb-9686a3716960\" (UID: \"520ac706-b33e-4b65-a9cb-9686a3716960\") " Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.589629 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-utilities" (OuterVolumeSpecName: "utilities") pod "520ac706-b33e-4b65-a9cb-9686a3716960" (UID: "520ac706-b33e-4b65-a9cb-9686a3716960"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.594756 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/520ac706-b33e-4b65-a9cb-9686a3716960-kube-api-access-mq2k9" (OuterVolumeSpecName: "kube-api-access-mq2k9") pod "520ac706-b33e-4b65-a9cb-9686a3716960" (UID: "520ac706-b33e-4b65-a9cb-9686a3716960"). InnerVolumeSpecName "kube-api-access-mq2k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.636278 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "520ac706-b33e-4b65-a9cb-9686a3716960" (UID: "520ac706-b33e-4b65-a9cb-9686a3716960"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.692122 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.692472 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520ac706-b33e-4b65-a9cb-9686a3716960-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:15 crc kubenswrapper[4809]: I0930 01:10:15.692502 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq2k9\" (UniqueName: \"kubernetes.io/projected/520ac706-b33e-4b65-a9cb-9686a3716960-kube-api-access-mq2k9\") on node \"crc\" DevicePath \"\"" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.027320 4809 generic.go:334] "Generic (PLEG): container finished" podID="520ac706-b33e-4b65-a9cb-9686a3716960" containerID="81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7" exitCode=0 Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.027412 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerDied","Data":"81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7"} Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.027455 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4ns5q" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.027506 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4ns5q" event={"ID":"520ac706-b33e-4b65-a9cb-9686a3716960","Type":"ContainerDied","Data":"fd129457b7377cb05387d9bb820d09901a0c37fdfd6cd826afdd3aa4fbe1ecca"} Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.027541 4809 scope.go:117] "RemoveContainer" containerID="81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.059024 4809 scope.go:117] "RemoveContainer" containerID="218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.066361 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4ns5q"] Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.075776 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4ns5q"] Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.105907 4809 scope.go:117] "RemoveContainer" containerID="638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.225460 4809 scope.go:117] "RemoveContainer" containerID="81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7" Sep 30 01:10:16 crc kubenswrapper[4809]: E0930 01:10:16.230052 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7\": container with ID starting with 81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7 not found: ID does not exist" containerID="81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.230088 
4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7"} err="failed to get container status \"81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7\": rpc error: code = NotFound desc = could not find container \"81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7\": container with ID starting with 81be88a1ec45f44dcf714b115a58ecc9abe2d8548c9a589003f92b6f2c1438a7 not found: ID does not exist" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.230112 4809 scope.go:117] "RemoveContainer" containerID="218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789" Sep 30 01:10:16 crc kubenswrapper[4809]: E0930 01:10:16.234069 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789\": container with ID starting with 218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789 not found: ID does not exist" containerID="218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.234104 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789"} err="failed to get container status \"218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789\": rpc error: code = NotFound desc = could not find container \"218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789\": container with ID starting with 218cdb43ef32cd25fdb78aa137b3c0fcce5a5115425872268f50008ce4d6a789 not found: ID does not exist" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.234124 4809 scope.go:117] "RemoveContainer" containerID="638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0" Sep 30 01:10:16 crc kubenswrapper[4809]: E0930 01:10:16.239979 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0\": container with ID starting with 638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0 not found: ID does not exist" containerID="638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0" Sep 30 01:10:16 crc kubenswrapper[4809]: I0930 01:10:16.240017 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0"} err="failed to get container status \"638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0\": rpc error: code = NotFound desc = could not find container \"638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0\": container with ID starting with 638195c2df281edff3c491e87f0b653e780586f1ab92dcedf13066f929b0caa0 not found: ID does not exist" Sep 30 01:10:17 crc kubenswrapper[4809]: I0930 01:10:17.712328 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" path="/var/lib/kubelet/pods/520ac706-b33e-4b65-a9cb-9686a3716960/volumes" Sep 30 01:10:18 crc kubenswrapper[4809]: I0930 01:10:18.691674 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:10:18 crc kubenswrapper[4809]: E0930 01:10:18.692388 4809 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:10:32 crc kubenswrapper[4809]: I0930 01:10:32.691142 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:10:32 crc kubenswrapper[4809]: E0930 01:10:32.691889 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:10:45 crc kubenswrapper[4809]: I0930 01:10:45.691273 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:10:45 crc kubenswrapper[4809]: E0930 01:10:45.692457 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:10:59 crc kubenswrapper[4809]: I0930 01:10:59.699923 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:10:59 crc kubenswrapper[4809]: E0930 01:10:59.701132 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:11:11 crc kubenswrapper[4809]: I0930 01:11:11.691469 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:11:11 crc kubenswrapper[4809]: E0930 01:11:11.692957 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:11:23 crc kubenswrapper[4809]: I0930 01:11:23.691698 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:11:23 crc kubenswrapper[4809]: E0930 01:11:23.693006 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:11:36 crc kubenswrapper[4809]: I0930 01:11:36.691414 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:11:36 crc kubenswrapper[4809]: E0930 01:11:36.692181 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:11:51 crc kubenswrapper[4809]: I0930 01:11:51.691861 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:11:51 crc kubenswrapper[4809]: E0930 01:11:51.693089 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:12:03 crc kubenswrapper[4809]: I0930 01:12:03.692341 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:12:03 crc kubenswrapper[4809]: E0930 01:12:03.693578 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:12:14 crc kubenswrapper[4809]: I0930 01:12:14.691077 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:12:14 crc kubenswrapper[4809]: E0930 01:12:14.691896 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:12:28 crc kubenswrapper[4809]: I0930 01:12:28.692016 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:12:28 crc kubenswrapper[4809]: E0930 01:12:28.692895 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:12:39 crc kubenswrapper[4809]: I0930 01:12:39.701686 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:12:39 crc kubenswrapper[4809]: E0930 01:12:39.703009 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:12:53 crc kubenswrapper[4809]: I0930 01:12:53.692134 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:12:53 crc kubenswrapper[4809]: E0930 01:12:53.694122 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:13:06 crc kubenswrapper[4809]: I0930 01:13:06.691867 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:13:06 crc kubenswrapper[4809]: E0930 01:13:06.692840 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:13:20 crc kubenswrapper[4809]: I0930 01:13:20.691559 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:13:20 crc kubenswrapper[4809]: E0930 01:13:20.692445 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:13:31 crc kubenswrapper[4809]: I0930 01:13:31.692422 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:13:31 crc kubenswrapper[4809]: E0930 01:13:31.693696 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:13:42 crc kubenswrapper[4809]: I0930 01:13:42.691018 4809 scope.go:117] "RemoveContainer" 
containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:13:42 crc kubenswrapper[4809]: E0930 01:13:42.691853 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:13:44 crc kubenswrapper[4809]: I0930 01:13:44.608170 4809 generic.go:334] "Generic (PLEG): container finished" podID="4886a4c8-eb31-4729-9364-86652f1284c8" containerID="75090b18eafd0e674f5b28af742f364745ff097799279d0a90b5596a995f5b51" exitCode=0 Sep 30 01:13:44 crc kubenswrapper[4809]: I0930 01:13:44.608281 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" event={"ID":"4886a4c8-eb31-4729-9364-86652f1284c8","Type":"ContainerDied","Data":"75090b18eafd0e674f5b28af742f364745ff097799279d0a90b5596a995f5b51"} Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.128262 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.204455 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-inventory\") pod \"4886a4c8-eb31-4729-9364-86652f1284c8\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.204516 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ssh-key\") pod \"4886a4c8-eb31-4729-9364-86652f1284c8\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.204584 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-combined-ca-bundle\") pod \"4886a4c8-eb31-4729-9364-86652f1284c8\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.204750 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ceph\") pod \"4886a4c8-eb31-4729-9364-86652f1284c8\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.204779 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-secret-0\") pod \"4886a4c8-eb31-4729-9364-86652f1284c8\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.204822 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cz7d\" (UniqueName: \"kubernetes.io/projected/4886a4c8-eb31-4729-9364-86652f1284c8-kube-api-access-6cz7d\") pod \"4886a4c8-eb31-4729-9364-86652f1284c8\" (UID: \"4886a4c8-eb31-4729-9364-86652f1284c8\") " Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.210599 4809 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4886a4c8-eb31-4729-9364-86652f1284c8-kube-api-access-6cz7d" (OuterVolumeSpecName: "kube-api-access-6cz7d") pod "4886a4c8-eb31-4729-9364-86652f1284c8" (UID: "4886a4c8-eb31-4729-9364-86652f1284c8"). InnerVolumeSpecName "kube-api-access-6cz7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.211165 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ceph" (OuterVolumeSpecName: "ceph") pod "4886a4c8-eb31-4729-9364-86652f1284c8" (UID: "4886a4c8-eb31-4729-9364-86652f1284c8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.211793 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "4886a4c8-eb31-4729-9364-86652f1284c8" (UID: "4886a4c8-eb31-4729-9364-86652f1284c8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.243415 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "4886a4c8-eb31-4729-9364-86652f1284c8" (UID: "4886a4c8-eb31-4729-9364-86652f1284c8"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.244572 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-inventory" (OuterVolumeSpecName: "inventory") pod "4886a4c8-eb31-4729-9364-86652f1284c8" (UID: "4886a4c8-eb31-4729-9364-86652f1284c8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.254995 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4886a4c8-eb31-4729-9364-86652f1284c8" (UID: "4886a4c8-eb31-4729-9364-86652f1284c8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.308189 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.308230 4809 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.308241 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cz7d\" (UniqueName: \"kubernetes.io/projected/4886a4c8-eb31-4729-9364-86652f1284c8-kube-api-access-6cz7d\") on node \"crc\" DevicePath \"\"" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.308251 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.308259 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.308271 4809 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4886a4c8-eb31-4729-9364-86652f1284c8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.632514 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" event={"ID":"4886a4c8-eb31-4729-9364-86652f1284c8","Type":"ContainerDied","Data":"ec8e622cc8c248bfb06d98e29c58bc8537ae3f2eb29167dda94b1b688da0d004"} Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.632556 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec8e622cc8c248bfb06d98e29c58bc8537ae3f2eb29167dda94b1b688da0d004" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.632609 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mgthw" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.755370 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm"] Sep 30 01:13:46 crc kubenswrapper[4809]: E0930 01:13:46.755876 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="extract-utilities" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.755893 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="extract-utilities" Sep 30 01:13:46 crc kubenswrapper[4809]: E0930 01:13:46.755913 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="extract-content" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.755920 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="extract-content" Sep 30 01:13:46 crc kubenswrapper[4809]: E0930 01:13:46.755937 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="registry-server" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.755943 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="registry-server" Sep 30 01:13:46 crc kubenswrapper[4809]: E0930 01:13:46.755976 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4886a4c8-eb31-4729-9364-86652f1284c8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.755982 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4886a4c8-eb31-4729-9364-86652f1284c8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.756191 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="520ac706-b33e-4b65-a9cb-9686a3716960" containerName="registry-server" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.756221 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4886a4c8-eb31-4729-9364-86652f1284c8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.756977 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.759058 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.759707 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.759835 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.760120 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.760219 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.760497 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.760709 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.760872 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.761328 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.771545 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm"] Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.822753 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.822803 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.822936 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.822999 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823028 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjwcp\" (UniqueName: \"kubernetes.io/projected/a976aeec-864a-445c-8fc2-5e5d53332dce-kube-api-access-vjwcp\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823050 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823135 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823158 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823243 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823299 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.823332 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " 
pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925307 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925446 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925493 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925521 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925599 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925627 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925715 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925757 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925789 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjwcp\" (UniqueName: \"kubernetes.io/projected/a976aeec-864a-445c-8fc2-5e5d53332dce-kube-api-access-vjwcp\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925813 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.925880 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.926791 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.927307 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.929535 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.929955 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.930230 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.930695 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.931181 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.931668 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.932179 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.932809 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:46 crc kubenswrapper[4809]: I0930 01:13:46.948563 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjwcp\" (UniqueName: \"kubernetes.io/projected/a976aeec-864a-445c-8fc2-5e5d53332dce-kube-api-access-vjwcp\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:47 crc kubenswrapper[4809]: I0930 01:13:47.077512 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:13:47 crc kubenswrapper[4809]: I0930 01:13:47.683978 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm"] Sep 30 01:13:47 crc kubenswrapper[4809]: I0930 01:13:47.687189 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:13:48 crc kubenswrapper[4809]: I0930 01:13:48.668058 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" event={"ID":"a976aeec-864a-445c-8fc2-5e5d53332dce","Type":"ContainerStarted","Data":"89cb7400d0c4d89d5532400aa321f6641c507c738d12635fc940c80a0955aad9"} Sep 30 01:13:48 crc kubenswrapper[4809]: I0930 01:13:48.668476 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" event={"ID":"a976aeec-864a-445c-8fc2-5e5d53332dce","Type":"ContainerStarted","Data":"5f86e0f9474e032b0a5f822e3ea1d4be7838d3fc76b3c27b1512d515dbef5421"} Sep 30 01:13:48 crc kubenswrapper[4809]: I0930 01:13:48.717264 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" podStartSLOduration=2.28560351 podStartE2EDuration="2.717243228s" podCreationTimestamp="2025-09-30 01:13:46 +0000 UTC" firstStartedPulling="2025-09-30 01:13:47.686905453 +0000 UTC m=+3878.723154861" lastFinishedPulling="2025-09-30 01:13:48.118545171 +0000 UTC m=+3879.154794579" observedRunningTime="2025-09-30 01:13:48.710392001 +0000 UTC m=+3879.746641409" watchObservedRunningTime="2025-09-30 01:13:48.717243228 +0000 UTC m=+3879.753492636" Sep 30 01:13:57 crc kubenswrapper[4809]: I0930 01:13:57.691826 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:13:57 crc kubenswrapper[4809]: E0930 01:13:57.692983 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:14:12 crc kubenswrapper[4809]: I0930 01:14:12.692209 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:14:12 crc kubenswrapper[4809]: E0930 01:14:12.693436 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:14:17 crc kubenswrapper[4809]: I0930 01:14:17.394600 4809 scope.go:117] "RemoveContainer" containerID="5ffd4ad54a3be171b401b59beac2d4f97f4c884dbf5ef44b58307f7fdf904c83" Sep 30 01:14:17 crc kubenswrapper[4809]: I0930 01:14:17.420981 4809 scope.go:117] "RemoveContainer" containerID="7901f6d5489124dab98071cc69b7bbfaf3282053f59bff75ae32b05bffbe855e" Sep 30 01:14:17 crc kubenswrapper[4809]: I0930 
01:14:17.476203 4809 scope.go:117] "RemoveContainer" containerID="677ec8f746fdd4788ac751c8ab0a6d226353decf2980ede595e6252eaf039904" Sep 30 01:14:27 crc kubenswrapper[4809]: I0930 01:14:27.692295 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:14:28 crc kubenswrapper[4809]: I0930 01:14:28.135566 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"be39589ae6a698fecbd8e348e88db132298ce2169316f433161d54ed425c1718"} Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.168787 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr"] Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.171010 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.172972 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.174305 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.192522 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr"] Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.327406 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66aef030-5824-4b79-a039-650509c79935-secret-volume\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.327870 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66aef030-5824-4b79-a039-650509c79935-config-volume\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.328272 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gp9m\" (UniqueName: \"kubernetes.io/projected/66aef030-5824-4b79-a039-650509c79935-kube-api-access-2gp9m\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.430194 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gp9m\" (UniqueName: \"kubernetes.io/projected/66aef030-5824-4b79-a039-650509c79935-kube-api-access-2gp9m\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.430245 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66aef030-5824-4b79-a039-650509c79935-secret-volume\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.430345 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66aef030-5824-4b79-a039-650509c79935-config-volume\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.431238 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66aef030-5824-4b79-a039-650509c79935-config-volume\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.450049 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66aef030-5824-4b79-a039-650509c79935-secret-volume\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.454268 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gp9m\" (UniqueName: \"kubernetes.io/projected/66aef030-5824-4b79-a039-650509c79935-kube-api-access-2gp9m\") pod \"collect-profiles-29319915-8gcbr\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.497541 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:00 crc kubenswrapper[4809]: I0930 01:15:00.987221 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr"] Sep 30 01:15:01 crc kubenswrapper[4809]: I0930 01:15:01.521546 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" event={"ID":"66aef030-5824-4b79-a039-650509c79935","Type":"ContainerStarted","Data":"e561ed83d2af3807088663cb7ecfb19b881982c95729b822b730bc27adfc2268"} Sep 30 01:15:01 crc kubenswrapper[4809]: I0930 01:15:01.521883 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" event={"ID":"66aef030-5824-4b79-a039-650509c79935","Type":"ContainerStarted","Data":"60f3737cf40965262d7034ce7284f14e5acebb4746a1d3db00576ddb3c84a596"} Sep 30 01:15:01 crc kubenswrapper[4809]: I0930 01:15:01.540722 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" podStartSLOduration=1.540703503 podStartE2EDuration="1.540703503s" podCreationTimestamp="2025-09-30 01:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:15:01.534624277 +0000 UTC m=+3952.570873685" watchObservedRunningTime="2025-09-30 01:15:01.540703503 +0000 UTC m=+3952.576952911" Sep 30 01:15:02 crc kubenswrapper[4809]: I0930 01:15:02.553961 4809 generic.go:334] "Generic (PLEG): container finished" podID="66aef030-5824-4b79-a039-650509c79935" containerID="e561ed83d2af3807088663cb7ecfb19b881982c95729b822b730bc27adfc2268" exitCode=0 Sep 30 01:15:02 crc kubenswrapper[4809]: I0930 01:15:02.554150 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" event={"ID":"66aef030-5824-4b79-a039-650509c79935","Type":"ContainerDied","Data":"e561ed83d2af3807088663cb7ecfb19b881982c95729b822b730bc27adfc2268"} Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.001214 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.104099 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66aef030-5824-4b79-a039-650509c79935-secret-volume\") pod \"66aef030-5824-4b79-a039-650509c79935\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.104310 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gp9m\" (UniqueName: \"kubernetes.io/projected/66aef030-5824-4b79-a039-650509c79935-kube-api-access-2gp9m\") pod \"66aef030-5824-4b79-a039-650509c79935\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.104390 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66aef030-5824-4b79-a039-650509c79935-config-volume\") pod \"66aef030-5824-4b79-a039-650509c79935\" (UID: \"66aef030-5824-4b79-a039-650509c79935\") " Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.105017 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66aef030-5824-4b79-a039-650509c79935-config-volume" (OuterVolumeSpecName: "config-volume") pod "66aef030-5824-4b79-a039-650509c79935" (UID: "66aef030-5824-4b79-a039-650509c79935"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.109597 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66aef030-5824-4b79-a039-650509c79935-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "66aef030-5824-4b79-a039-650509c79935" (UID: "66aef030-5824-4b79-a039-650509c79935"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.112482 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66aef030-5824-4b79-a039-650509c79935-kube-api-access-2gp9m" (OuterVolumeSpecName: "kube-api-access-2gp9m") pod "66aef030-5824-4b79-a039-650509c79935" (UID: "66aef030-5824-4b79-a039-650509c79935"). InnerVolumeSpecName "kube-api-access-2gp9m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.206723 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66aef030-5824-4b79-a039-650509c79935-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.206758 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gp9m\" (UniqueName: \"kubernetes.io/projected/66aef030-5824-4b79-a039-650509c79935-kube-api-access-2gp9m\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.206767 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66aef030-5824-4b79-a039-650509c79935-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.572725 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" event={"ID":"66aef030-5824-4b79-a039-650509c79935","Type":"ContainerDied","Data":"60f3737cf40965262d7034ce7284f14e5acebb4746a1d3db00576ddb3c84a596"} Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.573095 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60f3737cf40965262d7034ce7284f14e5acebb4746a1d3db00576ddb3c84a596" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.572771 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr" Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.616090 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd"] Sep 30 01:15:04 crc kubenswrapper[4809]: I0930 01:15:04.624933 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-bz8rd"] Sep 30 01:15:05 crc kubenswrapper[4809]: I0930 01:15:05.707077 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c55f706-cdfa-49e9-8c5a-cf953606bd4d" path="/var/lib/kubelet/pods/6c55f706-cdfa-49e9-8c5a-cf953606bd4d/volumes" Sep 30 01:15:17 crc kubenswrapper[4809]: I0930 01:15:17.552408 4809 scope.go:117] "RemoveContainer" containerID="fcf4162525b4c58078f67fda805fbdee76aa6aea7e6d1012a372e58dd7e6867f" Sep 30 01:16:55 crc kubenswrapper[4809]: I0930 01:16:55.324943 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:16:55 crc kubenswrapper[4809]: I0930 01:16:55.325754 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:17:25 crc kubenswrapper[4809]: I0930 01:17:25.325126 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Sep 30 01:17:25 crc kubenswrapper[4809]: I0930 01:17:25.325759 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.325020 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.325539 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.325585 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.326472 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be39589ae6a698fecbd8e348e88db132298ce2169316f433161d54ed425c1718"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.326535 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://be39589ae6a698fecbd8e348e88db132298ce2169316f433161d54ed425c1718" gracePeriod=600 Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.582244 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="be39589ae6a698fecbd8e348e88db132298ce2169316f433161d54ed425c1718" exitCode=0 Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.582304 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"be39589ae6a698fecbd8e348e88db132298ce2169316f433161d54ed425c1718"} Sep 30 01:17:55 crc kubenswrapper[4809]: I0930 01:17:55.582601 4809 scope.go:117] "RemoveContainer" containerID="0fbaebacb731c5c367db938dfdaa284935494f568220156857d417e4efe63cd6" Sep 30 01:17:56 crc kubenswrapper[4809]: I0930 01:17:56.598055 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1"} Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.510626 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lwj6s"] Sep 30 01:18:33 crc kubenswrapper[4809]: E0930 01:18:33.511803 4809 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="66aef030-5824-4b79-a039-650509c79935" containerName="collect-profiles" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.511819 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="66aef030-5824-4b79-a039-650509c79935" containerName="collect-profiles" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.512121 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="66aef030-5824-4b79-a039-650509c79935" containerName="collect-profiles" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.514222 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.542549 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lwj6s"] Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.586674 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn477\" (UniqueName: \"kubernetes.io/projected/975a4e53-58dd-4e37-83d9-5c47e2ca5881-kube-api-access-mn477\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.586724 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-utilities\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.586991 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-catalog-content\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.688967 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn477\" (UniqueName: \"kubernetes.io/projected/975a4e53-58dd-4e37-83d9-5c47e2ca5881-kube-api-access-mn477\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.689280 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-utilities\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.689371 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-catalog-content\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.689877 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-utilities\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.690030 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-catalog-content\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.713796 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn477\" (UniqueName: \"kubernetes.io/projected/975a4e53-58dd-4e37-83d9-5c47e2ca5881-kube-api-access-mn477\") pod \"community-operators-lwj6s\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:33 crc kubenswrapper[4809]: I0930 01:18:33.842586 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:34 crc kubenswrapper[4809]: I0930 01:18:34.400450 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lwj6s"] Sep 30 01:18:34 crc kubenswrapper[4809]: W0930 01:18:34.405205 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod975a4e53_58dd_4e37_83d9_5c47e2ca5881.slice/crio-fc036b72d00a79cae37848729cb3684d1ac44fc12196af72e83beca5de29095b WatchSource:0}: Error finding container fc036b72d00a79cae37848729cb3684d1ac44fc12196af72e83beca5de29095b: Status 404 returned error can't find the container with id fc036b72d00a79cae37848729cb3684d1ac44fc12196af72e83beca5de29095b Sep 30 01:18:35 crc kubenswrapper[4809]: I0930 01:18:35.110176 4809 generic.go:334] "Generic (PLEG): container finished" podID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerID="5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6" exitCode=0 Sep 30 01:18:35 crc kubenswrapper[4809]: I0930 01:18:35.110260 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerDied","Data":"5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6"} Sep 30 01:18:35 crc kubenswrapper[4809]: I0930 01:18:35.111468 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerStarted","Data":"fc036b72d00a79cae37848729cb3684d1ac44fc12196af72e83beca5de29095b"} Sep 30 01:18:37 crc kubenswrapper[4809]: I0930 01:18:37.138767 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerStarted","Data":"9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a"} Sep 30 01:18:38 crc kubenswrapper[4809]: I0930 01:18:38.148811 4809 generic.go:334] "Generic (PLEG): container finished" podID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerID="9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a" exitCode=0 Sep 30 01:18:38 crc kubenswrapper[4809]: I0930 01:18:38.148879 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerDied","Data":"9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a"} Sep 30 01:18:39 crc kubenswrapper[4809]: I0930 01:18:39.162594 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerStarted","Data":"0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3"} Sep 30 01:18:39 crc kubenswrapper[4809]: I0930 01:18:39.188775 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lwj6s" podStartSLOduration=2.574841541 podStartE2EDuration="6.188757334s" podCreationTimestamp="2025-09-30 01:18:33 +0000 UTC" firstStartedPulling="2025-09-30 01:18:35.115052854 +0000 UTC m=+4166.151302302" lastFinishedPulling="2025-09-30 01:18:38.728968687 +0000 UTC m=+4169.765218095" observedRunningTime="2025-09-30 01:18:39.181534066 +0000 UTC m=+4170.217783494" watchObservedRunningTime="2025-09-30 01:18:39.188757334 +0000 UTC m=+4170.225006742" Sep 30 01:18:43 crc kubenswrapper[4809]: I0930 01:18:43.843085 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:43 crc kubenswrapper[4809]: I0930 01:18:43.843515 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:44 crc kubenswrapper[4809]: I0930 01:18:44.890080 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-lwj6s" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="registry-server" probeResult="failure" output=< Sep 30 01:18:44 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:18:44 crc kubenswrapper[4809]: > Sep 30 01:18:53 crc kubenswrapper[4809]: I0930 01:18:53.932872 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:54 crc kubenswrapper[4809]: I0930 01:18:54.019601 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:54 crc kubenswrapper[4809]: I0930 01:18:54.182542 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lwj6s"] Sep 30 01:18:55 crc kubenswrapper[4809]: I0930 01:18:55.361044 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lwj6s" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="registry-server" containerID="cri-o://0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3" gracePeriod=2 Sep 30 01:18:55 crc kubenswrapper[4809]: I0930 01:18:55.885103 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.005079 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-utilities\") pod \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.005281 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-catalog-content\") pod \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.005374 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn477\" (UniqueName: \"kubernetes.io/projected/975a4e53-58dd-4e37-83d9-5c47e2ca5881-kube-api-access-mn477\") pod \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\" (UID: \"975a4e53-58dd-4e37-83d9-5c47e2ca5881\") " Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.005953 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-utilities" (OuterVolumeSpecName: "utilities") pod "975a4e53-58dd-4e37-83d9-5c47e2ca5881" (UID: "975a4e53-58dd-4e37-83d9-5c47e2ca5881"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.012048 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/975a4e53-58dd-4e37-83d9-5c47e2ca5881-kube-api-access-mn477" (OuterVolumeSpecName: "kube-api-access-mn477") pod "975a4e53-58dd-4e37-83d9-5c47e2ca5881" (UID: "975a4e53-58dd-4e37-83d9-5c47e2ca5881"). InnerVolumeSpecName "kube-api-access-mn477". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.048663 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "975a4e53-58dd-4e37-83d9-5c47e2ca5881" (UID: "975a4e53-58dd-4e37-83d9-5c47e2ca5881"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.107229 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.107259 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn477\" (UniqueName: \"kubernetes.io/projected/975a4e53-58dd-4e37-83d9-5c47e2ca5881-kube-api-access-mn477\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.107270 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/975a4e53-58dd-4e37-83d9-5c47e2ca5881-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.381461 4809 generic.go:334] "Generic (PLEG): container finished" podID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerID="0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3" exitCode=0 Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.381524 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerDied","Data":"0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3"} Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.381556 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lwj6s" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.381578 4809 scope.go:117] "RemoveContainer" containerID="0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.381563 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwj6s" event={"ID":"975a4e53-58dd-4e37-83d9-5c47e2ca5881","Type":"ContainerDied","Data":"fc036b72d00a79cae37848729cb3684d1ac44fc12196af72e83beca5de29095b"} Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.429096 4809 scope.go:117] "RemoveContainer" containerID="9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.440862 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lwj6s"] Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.451105 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lwj6s"] Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.461411 4809 scope.go:117] "RemoveContainer" containerID="5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.518706 4809 scope.go:117] "RemoveContainer" containerID="0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3" Sep 30 01:18:56 crc kubenswrapper[4809]: E0930 01:18:56.519267 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3\": container with ID starting with 0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3 not found: ID does not exist" containerID="0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.519299 
4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3"} err="failed to get container status \"0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3\": rpc error: code = NotFound desc = could not find container \"0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3\": container with ID starting with 0084d41d92494f3e4820eb530749af4b469acab0d5c861ae1903f82a6c5bb3f3 not found: ID does not exist" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.519320 4809 scope.go:117] "RemoveContainer" containerID="9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a" Sep 30 01:18:56 crc kubenswrapper[4809]: E0930 01:18:56.519730 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a\": container with ID starting with 9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a not found: ID does not exist" containerID="9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.519804 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a"} err="failed to get container status \"9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a\": rpc error: code = NotFound desc = could not find container \"9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a\": container with ID starting with 9ebb07afe1f4830e4d82a5b4f2340bc01be2f445d60801c4655f2ca7506ebb5a not found: ID does not exist" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.519821 4809 scope.go:117] "RemoveContainer" containerID="5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6" Sep 30 01:18:56 crc kubenswrapper[4809]: E0930 01:18:56.520223 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6\": container with ID starting with 5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6 not found: ID does not exist" containerID="5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6" Sep 30 01:18:56 crc kubenswrapper[4809]: I0930 01:18:56.520274 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6"} err="failed to get container status \"5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6\": rpc error: code = NotFound desc = could not find container \"5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6\": container with ID starting with 5619c7b54eb71c0c0a92ab3d66827e76947f3ca65fe536be5ad5aa52993380d6 not found: ID does not exist" Sep 30 01:18:57 crc kubenswrapper[4809]: I0930 01:18:57.704451 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" path="/var/lib/kubelet/pods/975a4e53-58dd-4e37-83d9-5c47e2ca5881/volumes" Sep 30 01:19:20 crc kubenswrapper[4809]: I0930 01:19:20.722310 4809 generic.go:334] "Generic (PLEG): container finished" podID="a976aeec-864a-445c-8fc2-5e5d53332dce" containerID="89cb7400d0c4d89d5532400aa321f6641c507c738d12635fc940c80a0955aad9" exitCode=0 Sep 30 01:19:20 crc kubenswrapper[4809]: 
I0930 01:19:20.722414 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" event={"ID":"a976aeec-864a-445c-8fc2-5e5d53332dce","Type":"ContainerDied","Data":"89cb7400d0c4d89d5532400aa321f6641c507c738d12635fc940c80a0955aad9"} Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.218687 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.234102 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-1\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.234242 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.234277 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjwcp\" (UniqueName: \"kubernetes.io/projected/a976aeec-864a-445c-8fc2-5e5d53332dce-kube-api-access-vjwcp\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.234412 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-inventory\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.234470 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-0\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.234564 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-custom-ceph-combined-ca-bundle\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.235381 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-1\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.235417 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-extra-config-0\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.235456 4809 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph-nova-0\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.235497 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ssh-key\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.235556 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-0\") pod \"a976aeec-864a-445c-8fc2-5e5d53332dce\" (UID: \"a976aeec-864a-445c-8fc2-5e5d53332dce\") " Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.248473 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph" (OuterVolumeSpecName: "ceph") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.250738 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a976aeec-864a-445c-8fc2-5e5d53332dce-kube-api-access-vjwcp" (OuterVolumeSpecName: "kube-api-access-vjwcp") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "kube-api-access-vjwcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.250837 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.279095 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.282791 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.294520 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-inventory" (OuterVolumeSpecName: "inventory") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.299579 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.299865 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.311696 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.316746 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.317164 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a976aeec-864a-445c-8fc2-5e5d53332dce" (UID: "a976aeec-864a-445c-8fc2-5e5d53332dce"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339277 4809 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339310 4809 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339319 4809 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339329 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339337 4809 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339345 4809 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339354 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339362 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjwcp\" (UniqueName: \"kubernetes.io/projected/a976aeec-864a-445c-8fc2-5e5d53332dce-kube-api-access-vjwcp\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339370 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339377 4809 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.339385 4809 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a976aeec-864a-445c-8fc2-5e5d53332dce-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.753201 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" event={"ID":"a976aeec-864a-445c-8fc2-5e5d53332dce","Type":"ContainerDied","Data":"5f86e0f9474e032b0a5f822e3ea1d4be7838d3fc76b3c27b1512d515dbef5421"} Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.753239 4809 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="5f86e0f9474e032b0a5f822e3ea1d4be7838d3fc76b3c27b1512d515dbef5421" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.753300 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.857154 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87"] Sep 30 01:19:22 crc kubenswrapper[4809]: E0930 01:19:22.857864 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="extract-content" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.857896 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="extract-content" Sep 30 01:19:22 crc kubenswrapper[4809]: E0930 01:19:22.857944 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="extract-utilities" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.857957 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="extract-utilities" Sep 30 01:19:22 crc kubenswrapper[4809]: E0930 01:19:22.858062 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a976aeec-864a-445c-8fc2-5e5d53332dce" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.858077 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a976aeec-864a-445c-8fc2-5e5d53332dce" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 30 01:19:22 crc kubenswrapper[4809]: E0930 01:19:22.858106 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="registry-server" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.858118 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="registry-server" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.858463 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="975a4e53-58dd-4e37-83d9-5c47e2ca5881" containerName="registry-server" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.858512 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a976aeec-864a-445c-8fc2-5e5d53332dce" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.859859 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.863401 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.863415 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.863805 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.864104 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.868153 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87"] Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.872394 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:19:22 crc kubenswrapper[4809]: I0930 01:19:22.873728 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.052888 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.052928 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.052957 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.053040 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.053058 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: 
\"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.053146 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.053164 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pngx\" (UniqueName: \"kubernetes.io/projected/a004127c-f068-4f63-89a4-689cfec52df1-kube-api-access-4pngx\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.053200 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.155392 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.155715 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.155818 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.155932 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.156004 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: 
\"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.156672 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.156770 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pngx\" (UniqueName: \"kubernetes.io/projected/a004127c-f068-4f63-89a4-689cfec52df1-kube-api-access-4pngx\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.156935 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.705809 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.714747 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.714997 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.716498 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.716965 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pngx\" (UniqueName: 
\"kubernetes.io/projected/a004127c-f068-4f63-89a4-689cfec52df1-kube-api-access-4pngx\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.717019 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.725138 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.725826 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mzf87\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:23 crc kubenswrapper[4809]: I0930 01:19:23.781739 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:19:24 crc kubenswrapper[4809]: I0930 01:19:24.377654 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87"] Sep 30 01:19:24 crc kubenswrapper[4809]: I0930 01:19:24.383920 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:19:24 crc kubenswrapper[4809]: I0930 01:19:24.774675 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" event={"ID":"a004127c-f068-4f63-89a4-689cfec52df1","Type":"ContainerStarted","Data":"aadd66bc8b05acbfea9d7bb53655c0148b06329e4588aa77abe14cfe616f734b"} Sep 30 01:19:25 crc kubenswrapper[4809]: I0930 01:19:25.784751 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" event={"ID":"a004127c-f068-4f63-89a4-689cfec52df1","Type":"ContainerStarted","Data":"4fba2c43f7dd8c388e5a3af0dfd74598f30a64fb976ef96513cc789931cca9cc"} Sep 30 01:19:25 crc kubenswrapper[4809]: I0930 01:19:25.816600 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" podStartSLOduration=3.407839733 podStartE2EDuration="3.816565241s" podCreationTimestamp="2025-09-30 01:19:22 +0000 UTC" firstStartedPulling="2025-09-30 01:19:24.383606152 +0000 UTC m=+4215.419855570" lastFinishedPulling="2025-09-30 01:19:24.79233167 +0000 UTC m=+4215.828581078" observedRunningTime="2025-09-30 01:19:25.803053091 +0000 UTC m=+4216.839302499" watchObservedRunningTime="2025-09-30 01:19:25.816565241 +0000 UTC m=+4216.852814659" Sep 30 01:19:55 crc kubenswrapper[4809]: I0930 01:19:55.325197 4809 
patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:19:55 crc kubenswrapper[4809]: I0930 01:19:55.325667 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.611052 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hbb8l"] Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.615053 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.638836 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hbb8l"] Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.701549 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-utilities\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.701613 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-catalog-content\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.701874 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vsgk\" (UniqueName: \"kubernetes.io/projected/c4426988-2dc4-41a3-b91f-a79871b38d29-kube-api-access-6vsgk\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.803846 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vsgk\" (UniqueName: \"kubernetes.io/projected/c4426988-2dc4-41a3-b91f-a79871b38d29-kube-api-access-6vsgk\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.803935 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-utilities\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.803980 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-catalog-content\") pod \"redhat-operators-hbb8l\" (UID: 
\"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.804634 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-catalog-content\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.804853 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-utilities\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.823942 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vsgk\" (UniqueName: \"kubernetes.io/projected/c4426988-2dc4-41a3-b91f-a79871b38d29-kube-api-access-6vsgk\") pod \"redhat-operators-hbb8l\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:11 crc kubenswrapper[4809]: I0930 01:20:11.949521 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:12 crc kubenswrapper[4809]: I0930 01:20:12.504366 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hbb8l"] Sep 30 01:20:13 crc kubenswrapper[4809]: I0930 01:20:13.322734 4809 generic.go:334] "Generic (PLEG): container finished" podID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerID="378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d" exitCode=0 Sep 30 01:20:13 crc kubenswrapper[4809]: I0930 01:20:13.322922 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerDied","Data":"378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d"} Sep 30 01:20:13 crc kubenswrapper[4809]: I0930 01:20:13.323108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerStarted","Data":"dfb02a57fc14d07e0fa44acbb732544424cc0fd073ee8b46246922cf9c0ce532"} Sep 30 01:20:15 crc kubenswrapper[4809]: I0930 01:20:15.348192 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerStarted","Data":"79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337"} Sep 30 01:20:18 crc kubenswrapper[4809]: I0930 01:20:18.384118 4809 generic.go:334] "Generic (PLEG): container finished" podID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerID="79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337" exitCode=0 Sep 30 01:20:18 crc kubenswrapper[4809]: I0930 01:20:18.384219 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerDied","Data":"79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337"} Sep 30 01:20:19 crc kubenswrapper[4809]: I0930 01:20:19.399265 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerStarted","Data":"c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614"} Sep 30 01:20:19 crc kubenswrapper[4809]: I0930 01:20:19.428496 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hbb8l" podStartSLOduration=2.9531893399999998 podStartE2EDuration="8.428470621s" podCreationTimestamp="2025-09-30 01:20:11 +0000 UTC" firstStartedPulling="2025-09-30 01:20:13.325501773 +0000 UTC m=+4264.361751181" lastFinishedPulling="2025-09-30 01:20:18.800783034 +0000 UTC m=+4269.837032462" observedRunningTime="2025-09-30 01:20:19.424870093 +0000 UTC m=+4270.461119501" watchObservedRunningTime="2025-09-30 01:20:19.428470621 +0000 UTC m=+4270.464720049" Sep 30 01:20:21 crc kubenswrapper[4809]: I0930 01:20:21.950231 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:21 crc kubenswrapper[4809]: I0930 01:20:21.950933 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:23 crc kubenswrapper[4809]: I0930 01:20:23.024114 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hbb8l" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="registry-server" probeResult="failure" output=< Sep 30 01:20:23 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:20:23 crc kubenswrapper[4809]: > Sep 30 01:20:25 crc kubenswrapper[4809]: I0930 01:20:25.324680 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:20:25 crc kubenswrapper[4809]: I0930 01:20:25.325081 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:20:33 crc kubenswrapper[4809]: I0930 01:20:33.024108 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hbb8l" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="registry-server" probeResult="failure" output=< Sep 30 01:20:33 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:20:33 crc kubenswrapper[4809]: > Sep 30 01:20:42 crc kubenswrapper[4809]: I0930 01:20:42.016007 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:42 crc kubenswrapper[4809]: I0930 01:20:42.088249 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:42 crc kubenswrapper[4809]: I0930 01:20:42.813596 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hbb8l"] Sep 30 01:20:43 crc kubenswrapper[4809]: I0930 01:20:43.670593 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hbb8l" 
podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="registry-server" containerID="cri-o://c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614" gracePeriod=2 Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.251591 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.383846 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vsgk\" (UniqueName: \"kubernetes.io/projected/c4426988-2dc4-41a3-b91f-a79871b38d29-kube-api-access-6vsgk\") pod \"c4426988-2dc4-41a3-b91f-a79871b38d29\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.383938 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-utilities\") pod \"c4426988-2dc4-41a3-b91f-a79871b38d29\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.384262 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-catalog-content\") pod \"c4426988-2dc4-41a3-b91f-a79871b38d29\" (UID: \"c4426988-2dc4-41a3-b91f-a79871b38d29\") " Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.384863 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-utilities" (OuterVolumeSpecName: "utilities") pod "c4426988-2dc4-41a3-b91f-a79871b38d29" (UID: "c4426988-2dc4-41a3-b91f-a79871b38d29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.385360 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.393220 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4426988-2dc4-41a3-b91f-a79871b38d29-kube-api-access-6vsgk" (OuterVolumeSpecName: "kube-api-access-6vsgk") pod "c4426988-2dc4-41a3-b91f-a79871b38d29" (UID: "c4426988-2dc4-41a3-b91f-a79871b38d29"). InnerVolumeSpecName "kube-api-access-6vsgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.474138 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4426988-2dc4-41a3-b91f-a79871b38d29" (UID: "c4426988-2dc4-41a3-b91f-a79871b38d29"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.488527 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vsgk\" (UniqueName: \"kubernetes.io/projected/c4426988-2dc4-41a3-b91f-a79871b38d29-kube-api-access-6vsgk\") on node \"crc\" DevicePath \"\"" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.488585 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4426988-2dc4-41a3-b91f-a79871b38d29-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.686875 4809 generic.go:334] "Generic (PLEG): container finished" podID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerID="c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614" exitCode=0 Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.686926 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerDied","Data":"c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614"} Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.686982 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hbb8l" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.687004 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbb8l" event={"ID":"c4426988-2dc4-41a3-b91f-a79871b38d29","Type":"ContainerDied","Data":"dfb02a57fc14d07e0fa44acbb732544424cc0fd073ee8b46246922cf9c0ce532"} Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.687034 4809 scope.go:117] "RemoveContainer" containerID="c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.729587 4809 scope.go:117] "RemoveContainer" containerID="79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.741877 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hbb8l"] Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.752870 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hbb8l"] Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.757627 4809 scope.go:117] "RemoveContainer" containerID="378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.820597 4809 scope.go:117] "RemoveContainer" containerID="c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614" Sep 30 01:20:44 crc kubenswrapper[4809]: E0930 01:20:44.822068 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614\": container with ID starting with c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614 not found: ID does not exist" containerID="c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.822124 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614"} err="failed to get container status \"c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614\": 
rpc error: code = NotFound desc = could not find container \"c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614\": container with ID starting with c0d06d49756f854de7871e951a4105f210d63d99319800fd38b4b26ef1cc9614 not found: ID does not exist" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.822158 4809 scope.go:117] "RemoveContainer" containerID="79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337" Sep 30 01:20:44 crc kubenswrapper[4809]: E0930 01:20:44.822613 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337\": container with ID starting with 79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337 not found: ID does not exist" containerID="79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.822661 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337"} err="failed to get container status \"79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337\": rpc error: code = NotFound desc = could not find container \"79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337\": container with ID starting with 79e4313cfa1372fe28112380dac618d923fcdebe5ef6a62f9bcefd76b4347337 not found: ID does not exist" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.822692 4809 scope.go:117] "RemoveContainer" containerID="378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d" Sep 30 01:20:44 crc kubenswrapper[4809]: E0930 01:20:44.823084 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d\": container with ID starting with 378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d not found: ID does not exist" containerID="378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d" Sep 30 01:20:44 crc kubenswrapper[4809]: I0930 01:20:44.823170 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d"} err="failed to get container status \"378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d\": rpc error: code = NotFound desc = could not find container \"378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d\": container with ID starting with 378730e9013e5ff427b8ce3f038f033676b5d63970263efd7c211028a8255c4d not found: ID does not exist" Sep 30 01:20:45 crc kubenswrapper[4809]: I0930 01:20:45.702066 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" path="/var/lib/kubelet/pods/c4426988-2dc4-41a3-b91f-a79871b38d29/volumes" Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.325616 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.326382 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.326497 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.328356 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.328497 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" gracePeriod=600 Sep 30 01:20:55 crc kubenswrapper[4809]: E0930 01:20:55.463884 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.848845 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" exitCode=0 Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.848917 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1"} Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.849074 4809 scope.go:117] "RemoveContainer" containerID="be39589ae6a698fecbd8e348e88db132298ce2169316f433161d54ed425c1718" Sep 30 01:20:55 crc kubenswrapper[4809]: I0930 01:20:55.850095 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:20:55 crc kubenswrapper[4809]: E0930 01:20:55.850433 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:20:57 crc kubenswrapper[4809]: I0930 01:20:57.996734 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w67vk"] Sep 30 01:20:57 crc kubenswrapper[4809]: E0930 01:20:57.997849 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="registry-server" Sep 
30 01:20:57 crc kubenswrapper[4809]: I0930 01:20:57.997869 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="registry-server" Sep 30 01:20:57 crc kubenswrapper[4809]: E0930 01:20:57.997894 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="extract-content" Sep 30 01:20:57 crc kubenswrapper[4809]: I0930 01:20:57.997902 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="extract-content" Sep 30 01:20:57 crc kubenswrapper[4809]: E0930 01:20:57.997954 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="extract-utilities" Sep 30 01:20:57 crc kubenswrapper[4809]: I0930 01:20:57.997962 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="extract-utilities" Sep 30 01:20:57 crc kubenswrapper[4809]: I0930 01:20:57.998251 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4426988-2dc4-41a3-b91f-a79871b38d29" containerName="registry-server" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.000874 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.027855 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w67vk"] Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.050445 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-utilities\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.050502 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57l49\" (UniqueName: \"kubernetes.io/projected/6c2379b4-cf30-4631-a312-858b9a44e140-kube-api-access-57l49\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.050555 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-catalog-content\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.151478 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-utilities\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.151531 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57l49\" (UniqueName: \"kubernetes.io/projected/6c2379b4-cf30-4631-a312-858b9a44e140-kube-api-access-57l49\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " 
pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.151584 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-catalog-content\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.152243 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-utilities\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.152244 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-catalog-content\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.186323 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57l49\" (UniqueName: \"kubernetes.io/projected/6c2379b4-cf30-4631-a312-858b9a44e140-kube-api-access-57l49\") pod \"redhat-marketplace-w67vk\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.330102 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.593595 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m2zsw"] Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.596699 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.617511 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m2zsw"] Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.663369 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-utilities\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.663528 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-catalog-content\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.663657 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x59zf\" (UniqueName: \"kubernetes.io/projected/d8b3beb7-60d8-42f4-98d1-0f709692405f-kube-api-access-x59zf\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.765126 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-catalog-content\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.765586 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x59zf\" (UniqueName: \"kubernetes.io/projected/d8b3beb7-60d8-42f4-98d1-0f709692405f-kube-api-access-x59zf\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.765756 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-catalog-content\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.765767 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-utilities\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.766131 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-utilities\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.787570 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x59zf\" (UniqueName: \"kubernetes.io/projected/d8b3beb7-60d8-42f4-98d1-0f709692405f-kube-api-access-x59zf\") pod \"certified-operators-m2zsw\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.810035 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w67vk"] Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.899949 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerStarted","Data":"510199d6fde8fd029b92c214eb8ca9bc6d96e50d8d4664cf65bf97bfe85822de"} Sep 30 01:20:58 crc kubenswrapper[4809]: I0930 01:20:58.933043 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:20:59 crc kubenswrapper[4809]: I0930 01:20:59.254828 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m2zsw"] Sep 30 01:20:59 crc kubenswrapper[4809]: W0930 01:20:59.257348 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8b3beb7_60d8_42f4_98d1_0f709692405f.slice/crio-9d945b809972d68b90d2238e14cbf36317fbf091e34ce7a8d24b9791d62ec6a9 WatchSource:0}: Error finding container 9d945b809972d68b90d2238e14cbf36317fbf091e34ce7a8d24b9791d62ec6a9: Status 404 returned error can't find the container with id 9d945b809972d68b90d2238e14cbf36317fbf091e34ce7a8d24b9791d62ec6a9 Sep 30 01:20:59 crc kubenswrapper[4809]: I0930 01:20:59.916380 4809 generic.go:334] "Generic (PLEG): container finished" podID="6c2379b4-cf30-4631-a312-858b9a44e140" containerID="1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3" exitCode=0 Sep 30 01:20:59 crc kubenswrapper[4809]: I0930 01:20:59.916453 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerDied","Data":"1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3"} Sep 30 01:20:59 crc kubenswrapper[4809]: I0930 01:20:59.921489 4809 generic.go:334] "Generic (PLEG): container finished" podID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerID="7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad" exitCode=0 Sep 30 01:20:59 crc kubenswrapper[4809]: I0930 01:20:59.921526 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerDied","Data":"7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad"} Sep 30 01:20:59 crc kubenswrapper[4809]: I0930 01:20:59.921554 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerStarted","Data":"9d945b809972d68b90d2238e14cbf36317fbf091e34ce7a8d24b9791d62ec6a9"} Sep 30 01:21:00 crc kubenswrapper[4809]: I0930 01:21:00.934601 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerStarted","Data":"dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f"} Sep 30 01:21:01 crc kubenswrapper[4809]: I0930 
01:21:01.961981 4809 generic.go:334] "Generic (PLEG): container finished" podID="6c2379b4-cf30-4631-a312-858b9a44e140" containerID="dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f" exitCode=0 Sep 30 01:21:01 crc kubenswrapper[4809]: I0930 01:21:01.962088 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerDied","Data":"dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f"} Sep 30 01:21:01 crc kubenswrapper[4809]: I0930 01:21:01.969579 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerStarted","Data":"adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c"} Sep 30 01:21:02 crc kubenswrapper[4809]: I0930 01:21:02.985900 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerStarted","Data":"d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd"} Sep 30 01:21:02 crc kubenswrapper[4809]: I0930 01:21:02.990878 4809 generic.go:334] "Generic (PLEG): container finished" podID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerID="adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c" exitCode=0 Sep 30 01:21:02 crc kubenswrapper[4809]: I0930 01:21:02.990922 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerDied","Data":"adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c"} Sep 30 01:21:03 crc kubenswrapper[4809]: I0930 01:21:03.028882 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w67vk" podStartSLOduration=3.487829405 podStartE2EDuration="6.028863604s" podCreationTimestamp="2025-09-30 01:20:57 +0000 UTC" firstStartedPulling="2025-09-30 01:20:59.918240449 +0000 UTC m=+4310.954489897" lastFinishedPulling="2025-09-30 01:21:02.459274688 +0000 UTC m=+4313.495524096" observedRunningTime="2025-09-30 01:21:03.02251382 +0000 UTC m=+4314.058763318" watchObservedRunningTime="2025-09-30 01:21:03.028863604 +0000 UTC m=+4314.065113012" Sep 30 01:21:04 crc kubenswrapper[4809]: I0930 01:21:04.004123 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerStarted","Data":"5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef"} Sep 30 01:21:04 crc kubenswrapper[4809]: I0930 01:21:04.036465 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m2zsw" podStartSLOduration=2.487126066 podStartE2EDuration="6.036442189s" podCreationTimestamp="2025-09-30 01:20:58 +0000 UTC" firstStartedPulling="2025-09-30 01:20:59.923659828 +0000 UTC m=+4310.959909236" lastFinishedPulling="2025-09-30 01:21:03.472975941 +0000 UTC m=+4314.509225359" observedRunningTime="2025-09-30 01:21:04.024151103 +0000 UTC m=+4315.060400551" watchObservedRunningTime="2025-09-30 01:21:04.036442189 +0000 UTC m=+4315.072691607" Sep 30 01:21:06 crc kubenswrapper[4809]: I0930 01:21:06.691530 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:21:06 crc 
kubenswrapper[4809]: E0930 01:21:06.692033 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:21:08 crc kubenswrapper[4809]: I0930 01:21:08.331242 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:21:08 crc kubenswrapper[4809]: I0930 01:21:08.331784 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:21:08 crc kubenswrapper[4809]: I0930 01:21:08.389591 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:21:08 crc kubenswrapper[4809]: I0930 01:21:08.933289 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:21:08 crc kubenswrapper[4809]: I0930 01:21:08.933672 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:21:09 crc kubenswrapper[4809]: I0930 01:21:09.004492 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:21:09 crc kubenswrapper[4809]: I0930 01:21:09.116344 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:21:09 crc kubenswrapper[4809]: I0930 01:21:09.118253 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:21:09 crc kubenswrapper[4809]: I0930 01:21:09.977466 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m2zsw"] Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.090743 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m2zsw" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="registry-server" containerID="cri-o://5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef" gracePeriod=2 Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.381422 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w67vk"] Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.382097 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w67vk" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="registry-server" containerID="cri-o://d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd" gracePeriod=2 Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.630701 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.775179 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-catalog-content\") pod \"d8b3beb7-60d8-42f4-98d1-0f709692405f\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.775559 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-utilities\") pod \"d8b3beb7-60d8-42f4-98d1-0f709692405f\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.775694 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x59zf\" (UniqueName: \"kubernetes.io/projected/d8b3beb7-60d8-42f4-98d1-0f709692405f-kube-api-access-x59zf\") pod \"d8b3beb7-60d8-42f4-98d1-0f709692405f\" (UID: \"d8b3beb7-60d8-42f4-98d1-0f709692405f\") " Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.776560 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-utilities" (OuterVolumeSpecName: "utilities") pod "d8b3beb7-60d8-42f4-98d1-0f709692405f" (UID: "d8b3beb7-60d8-42f4-98d1-0f709692405f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.782565 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8b3beb7-60d8-42f4-98d1-0f709692405f-kube-api-access-x59zf" (OuterVolumeSpecName: "kube-api-access-x59zf") pod "d8b3beb7-60d8-42f4-98d1-0f709692405f" (UID: "d8b3beb7-60d8-42f4-98d1-0f709692405f"). InnerVolumeSpecName "kube-api-access-x59zf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.836230 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8b3beb7-60d8-42f4-98d1-0f709692405f" (UID: "d8b3beb7-60d8-42f4-98d1-0f709692405f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.878937 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.878976 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b3beb7-60d8-42f4-98d1-0f709692405f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.878994 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x59zf\" (UniqueName: \"kubernetes.io/projected/d8b3beb7-60d8-42f4-98d1-0f709692405f-kube-api-access-x59zf\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.897202 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.980572 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-utilities\") pod \"6c2379b4-cf30-4631-a312-858b9a44e140\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.981001 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57l49\" (UniqueName: \"kubernetes.io/projected/6c2379b4-cf30-4631-a312-858b9a44e140-kube-api-access-57l49\") pod \"6c2379b4-cf30-4631-a312-858b9a44e140\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.981096 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-catalog-content\") pod \"6c2379b4-cf30-4631-a312-858b9a44e140\" (UID: \"6c2379b4-cf30-4631-a312-858b9a44e140\") " Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.981478 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-utilities" (OuterVolumeSpecName: "utilities") pod "6c2379b4-cf30-4631-a312-858b9a44e140" (UID: "6c2379b4-cf30-4631-a312-858b9a44e140"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.981634 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:11 crc kubenswrapper[4809]: I0930 01:21:11.986022 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c2379b4-cf30-4631-a312-858b9a44e140-kube-api-access-57l49" (OuterVolumeSpecName: "kube-api-access-57l49") pod "6c2379b4-cf30-4631-a312-858b9a44e140" (UID: "6c2379b4-cf30-4631-a312-858b9a44e140"). InnerVolumeSpecName "kube-api-access-57l49". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.003184 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c2379b4-cf30-4631-a312-858b9a44e140" (UID: "6c2379b4-cf30-4631-a312-858b9a44e140"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.084056 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57l49\" (UniqueName: \"kubernetes.io/projected/6c2379b4-cf30-4631-a312-858b9a44e140-kube-api-access-57l49\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.084122 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c2379b4-cf30-4631-a312-858b9a44e140-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.106934 4809 generic.go:334] "Generic (PLEG): container finished" podID="6c2379b4-cf30-4631-a312-858b9a44e140" containerID="d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd" exitCode=0 Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.106980 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w67vk" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.106995 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerDied","Data":"d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd"} Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.107065 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w67vk" event={"ID":"6c2379b4-cf30-4631-a312-858b9a44e140","Type":"ContainerDied","Data":"510199d6fde8fd029b92c214eb8ca9bc6d96e50d8d4664cf65bf97bfe85822de"} Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.107100 4809 scope.go:117] "RemoveContainer" containerID="d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.115056 4809 generic.go:334] "Generic (PLEG): container finished" podID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerID="5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef" exitCode=0 Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.115125 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerDied","Data":"5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef"} Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.115159 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m2zsw" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.115177 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2zsw" event={"ID":"d8b3beb7-60d8-42f4-98d1-0f709692405f","Type":"ContainerDied","Data":"9d945b809972d68b90d2238e14cbf36317fbf091e34ce7a8d24b9791d62ec6a9"} Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.159633 4809 scope.go:117] "RemoveContainer" containerID="dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.167814 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w67vk"] Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.186039 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w67vk"] Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.199521 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m2zsw"] Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.202961 4809 scope.go:117] "RemoveContainer" containerID="1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.209445 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m2zsw"] Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.270747 4809 scope.go:117] "RemoveContainer" containerID="d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd" Sep 30 01:21:12 crc kubenswrapper[4809]: E0930 01:21:12.271364 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd\": container with ID starting with d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd not found: ID does not exist" containerID="d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.271411 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd"} err="failed to get container status \"d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd\": rpc error: code = NotFound desc = could not find container \"d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd\": container with ID starting with d1993b066c2f24513adec348b18d29a0111cc570ce56a74c80c88591e7a3e9cd not found: ID does not exist" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.271439 4809 scope.go:117] "RemoveContainer" containerID="dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f" Sep 30 01:21:12 crc kubenswrapper[4809]: E0930 01:21:12.272074 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f\": container with ID starting with dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f not found: ID does not exist" containerID="dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.272136 4809 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f"} err="failed to get container status \"dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f\": rpc error: code = NotFound desc = could not find container \"dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f\": container with ID starting with dfaa77e057475100e7c4b1b8ad49900c229b82f67b3eff169d0174a5ba3d7d1f not found: ID does not exist" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.272181 4809 scope.go:117] "RemoveContainer" containerID="1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3" Sep 30 01:21:12 crc kubenswrapper[4809]: E0930 01:21:12.272760 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3\": container with ID starting with 1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3 not found: ID does not exist" containerID="1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.272803 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3"} err="failed to get container status \"1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3\": rpc error: code = NotFound desc = could not find container \"1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3\": container with ID starting with 1cb44cafaedd5e24cd8360dc56de428d2f5ac7e421686ec9d0ca34f3db9f23e3 not found: ID does not exist" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.272854 4809 scope.go:117] "RemoveContainer" containerID="5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.330829 4809 scope.go:117] "RemoveContainer" containerID="adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.351688 4809 scope.go:117] "RemoveContainer" containerID="7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.417631 4809 scope.go:117] "RemoveContainer" containerID="5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef" Sep 30 01:21:12 crc kubenswrapper[4809]: E0930 01:21:12.418501 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef\": container with ID starting with 5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef not found: ID does not exist" containerID="5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.418539 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef"} err="failed to get container status \"5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef\": rpc error: code = NotFound desc = could not find container \"5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef\": container with ID starting with 5e671fcc6b9cf415247335d4145dd3ea52f58f2266881c6852132dc5c42587ef not found: ID does not exist" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.418566 4809 
scope.go:117] "RemoveContainer" containerID="adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c" Sep 30 01:21:12 crc kubenswrapper[4809]: E0930 01:21:12.419240 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c\": container with ID starting with adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c not found: ID does not exist" containerID="adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.419270 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c"} err="failed to get container status \"adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c\": rpc error: code = NotFound desc = could not find container \"adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c\": container with ID starting with adefd96b2788813aa43fa38894c9d4f25a66cac6512ec9643bddd70d37df059c not found: ID does not exist" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.419289 4809 scope.go:117] "RemoveContainer" containerID="7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad" Sep 30 01:21:12 crc kubenswrapper[4809]: E0930 01:21:12.419791 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad\": container with ID starting with 7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad not found: ID does not exist" containerID="7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad" Sep 30 01:21:12 crc kubenswrapper[4809]: I0930 01:21:12.419829 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad"} err="failed to get container status \"7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad\": rpc error: code = NotFound desc = could not find container \"7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad\": container with ID starting with 7513f7ce21ef575c999b7c9c179ea4322acc0d4d0cf789eca3ed1d0bb86253ad not found: ID does not exist" Sep 30 01:21:13 crc kubenswrapper[4809]: I0930 01:21:13.708165 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" path="/var/lib/kubelet/pods/6c2379b4-cf30-4631-a312-858b9a44e140/volumes" Sep 30 01:21:13 crc kubenswrapper[4809]: I0930 01:21:13.709859 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" path="/var/lib/kubelet/pods/d8b3beb7-60d8-42f4-98d1-0f709692405f/volumes" Sep 30 01:21:18 crc kubenswrapper[4809]: I0930 01:21:18.692004 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:21:18 crc kubenswrapper[4809]: E0930 01:21:18.694186 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:21:33 crc kubenswrapper[4809]: I0930 01:21:33.690806 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:21:33 crc kubenswrapper[4809]: E0930 01:21:33.691715 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:21:45 crc kubenswrapper[4809]: I0930 01:21:45.691256 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:21:45 crc kubenswrapper[4809]: E0930 01:21:45.692055 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:21:57 crc kubenswrapper[4809]: I0930 01:21:57.691132 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:21:57 crc kubenswrapper[4809]: E0930 01:21:57.692183 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:22:10 crc kubenswrapper[4809]: I0930 01:22:10.700043 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:22:10 crc kubenswrapper[4809]: E0930 01:22:10.701364 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:22:21 crc kubenswrapper[4809]: I0930 01:22:21.695370 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:22:21 crc kubenswrapper[4809]: E0930 01:22:21.696861 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:22:32 crc kubenswrapper[4809]: I0930 01:22:32.691378 4809 scope.go:117] "RemoveContainer" 
containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:22:32 crc kubenswrapper[4809]: E0930 01:22:32.692321 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:22:44 crc kubenswrapper[4809]: I0930 01:22:44.691781 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:22:44 crc kubenswrapper[4809]: E0930 01:22:44.693141 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:22:56 crc kubenswrapper[4809]: I0930 01:22:56.691149 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:22:56 crc kubenswrapper[4809]: E0930 01:22:56.691912 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:23:05 crc kubenswrapper[4809]: I0930 01:23:05.436806 4809 generic.go:334] "Generic (PLEG): container finished" podID="a004127c-f068-4f63-89a4-689cfec52df1" containerID="4fba2c43f7dd8c388e5a3af0dfd74598f30a64fb976ef96513cc789931cca9cc" exitCode=0 Sep 30 01:23:05 crc kubenswrapper[4809]: I0930 01:23:05.436926 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" event={"ID":"a004127c-f068-4f63-89a4-689cfec52df1","Type":"ContainerDied","Data":"4fba2c43f7dd8c388e5a3af0dfd74598f30a64fb976ef96513cc789931cca9cc"} Sep 30 01:23:06 crc kubenswrapper[4809]: I0930 01:23:06.987302 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068324 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-2\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068370 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pngx\" (UniqueName: \"kubernetes.io/projected/a004127c-f068-4f63-89a4-689cfec52df1-kube-api-access-4pngx\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068418 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-1\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068455 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceph\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068474 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ssh-key\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068623 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-0\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068703 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-telemetry-combined-ca-bundle\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.068754 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-inventory\") pod \"a004127c-f068-4f63-89a4-689cfec52df1\" (UID: \"a004127c-f068-4f63-89a4-689cfec52df1\") " Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.079058 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a004127c-f068-4f63-89a4-689cfec52df1-kube-api-access-4pngx" (OuterVolumeSpecName: "kube-api-access-4pngx") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "kube-api-access-4pngx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.080039 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceph" (OuterVolumeSpecName: "ceph") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.095343 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.105878 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.122973 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.125902 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-inventory" (OuterVolumeSpecName: "inventory") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.129143 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.151439 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "a004127c-f068-4f63-89a4-689cfec52df1" (UID: "a004127c-f068-4f63-89a4-689cfec52df1"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171599 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171657 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171673 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171687 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pngx\" (UniqueName: \"kubernetes.io/projected/a004127c-f068-4f63-89a4-689cfec52df1-kube-api-access-4pngx\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171699 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171712 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171724 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.171736 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a004127c-f068-4f63-89a4-689cfec52df1-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.465167 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" event={"ID":"a004127c-f068-4f63-89a4-689cfec52df1","Type":"ContainerDied","Data":"aadd66bc8b05acbfea9d7bb53655c0148b06329e4588aa77abe14cfe616f734b"} Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.465724 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aadd66bc8b05acbfea9d7bb53655c0148b06329e4588aa77abe14cfe616f734b" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.465245 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mzf87" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756025 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq"] Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756585 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="registry-server" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756608 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="registry-server" Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756625 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="extract-utilities" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756634 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="extract-utilities" Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756684 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="extract-content" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756694 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="extract-content" Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756714 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a004127c-f068-4f63-89a4-689cfec52df1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756727 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a004127c-f068-4f63-89a4-689cfec52df1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756748 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="extract-utilities" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756757 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="extract-utilities" Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756777 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="registry-server" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756785 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="registry-server" Sep 30 01:23:07 crc kubenswrapper[4809]: E0930 01:23:07.756809 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="extract-content" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.756817 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" containerName="extract-content" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.757116 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a004127c-f068-4f63-89a4-689cfec52df1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.757146 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c2379b4-cf30-4631-a312-858b9a44e140" 
containerName="registry-server" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.757192 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8b3beb7-60d8-42f4-98d1-0f709692405f" containerName="registry-server" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.758205 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.773440 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq"] Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.801319 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-ipmi-config-data" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.801594 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.801789 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.801932 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.802130 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.802274 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.897480 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fjzr\" (UniqueName: \"kubernetes.io/projected/87824ee6-3fd5-4157-afdb-4e524127dae8-kube-api-access-6fjzr\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.897629 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceph\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.897727 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.897782 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: 
\"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.897951 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.898057 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.898128 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:07 crc kubenswrapper[4809]: I0930 01:23:07.898160 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.000123 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.000260 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.000326 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 
crc kubenswrapper[4809]: I0930 01:23:08.000445 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.000791 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fjzr\" (UniqueName: \"kubernetes.io/projected/87824ee6-3fd5-4157-afdb-4e524127dae8-kube-api-access-6fjzr\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.001565 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceph\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.001739 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.001813 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.006955 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceph\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.007125 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.007343 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" 
(UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.008593 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.008702 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.018333 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.019908 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.022947 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fjzr\" (UniqueName: \"kubernetes.io/projected/87824ee6-3fd5-4157-afdb-4e524127dae8-kube-api-access-6fjzr\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.117907 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:23:08 crc kubenswrapper[4809]: I0930 01:23:08.746157 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq"] Sep 30 01:23:08 crc kubenswrapper[4809]: W0930 01:23:08.756313 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87824ee6_3fd5_4157_afdb_4e524127dae8.slice/crio-b553b935f07137e9778d7d87e26d6b8454c9a2a5805eea149ddd35c59d04bfbe WatchSource:0}: Error finding container b553b935f07137e9778d7d87e26d6b8454c9a2a5805eea149ddd35c59d04bfbe: Status 404 returned error can't find the container with id b553b935f07137e9778d7d87e26d6b8454c9a2a5805eea149ddd35c59d04bfbe Sep 30 01:23:09 crc kubenswrapper[4809]: I0930 01:23:09.496790 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" event={"ID":"87824ee6-3fd5-4157-afdb-4e524127dae8","Type":"ContainerStarted","Data":"b553b935f07137e9778d7d87e26d6b8454c9a2a5805eea149ddd35c59d04bfbe"} Sep 30 01:23:10 crc kubenswrapper[4809]: I0930 01:23:10.510062 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" event={"ID":"87824ee6-3fd5-4157-afdb-4e524127dae8","Type":"ContainerStarted","Data":"cef1876f2a75169a962c0882125ee94385310d1c9f97eb2372c3f3547cb44d28"} Sep 30 01:23:10 crc kubenswrapper[4809]: I0930 01:23:10.539543 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" podStartSLOduration=2.850145334 podStartE2EDuration="3.539500821s" podCreationTimestamp="2025-09-30 01:23:07 +0000 UTC" firstStartedPulling="2025-09-30 01:23:08.761111297 +0000 UTC m=+4439.797360745" lastFinishedPulling="2025-09-30 01:23:09.450466814 +0000 UTC m=+4440.486716232" observedRunningTime="2025-09-30 01:23:10.535104891 +0000 UTC m=+4441.571354329" watchObservedRunningTime="2025-09-30 01:23:10.539500821 +0000 UTC m=+4441.575750249" Sep 30 01:23:10 crc kubenswrapper[4809]: I0930 01:23:10.691482 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:23:10 crc kubenswrapper[4809]: E0930 01:23:10.691907 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:23:22 crc kubenswrapper[4809]: I0930 01:23:22.691692 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:23:22 crc kubenswrapper[4809]: E0930 01:23:22.692848 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 
30 01:23:37 crc kubenswrapper[4809]: I0930 01:23:37.690874 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:23:37 crc kubenswrapper[4809]: E0930 01:23:37.692021 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:23:51 crc kubenswrapper[4809]: I0930 01:23:51.691282 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:23:51 crc kubenswrapper[4809]: E0930 01:23:51.692158 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:24:02 crc kubenswrapper[4809]: I0930 01:24:02.691901 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:24:02 crc kubenswrapper[4809]: E0930 01:24:02.692579 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:24:14 crc kubenswrapper[4809]: I0930 01:24:14.694359 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:24:14 crc kubenswrapper[4809]: E0930 01:24:14.695310 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:24:29 crc kubenswrapper[4809]: I0930 01:24:29.700865 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:24:29 crc kubenswrapper[4809]: E0930 01:24:29.707231 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:24:44 crc kubenswrapper[4809]: I0930 01:24:44.691758 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:24:44 crc 
kubenswrapper[4809]: E0930 01:24:44.693017 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:24:55 crc kubenswrapper[4809]: I0930 01:24:55.691594 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:24:55 crc kubenswrapper[4809]: E0930 01:24:55.692457 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:25:07 crc kubenswrapper[4809]: I0930 01:25:07.692480 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:25:07 crc kubenswrapper[4809]: E0930 01:25:07.698070 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:25:21 crc kubenswrapper[4809]: I0930 01:25:21.708942 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:25:21 crc kubenswrapper[4809]: E0930 01:25:21.710817 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:25:36 crc kubenswrapper[4809]: I0930 01:25:36.691292 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:25:36 crc kubenswrapper[4809]: E0930 01:25:36.692494 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:25:48 crc kubenswrapper[4809]: I0930 01:25:48.691318 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:25:48 crc kubenswrapper[4809]: E0930 01:25:48.693224 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:25:59 crc kubenswrapper[4809]: I0930 01:25:59.699326 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:26:00 crc kubenswrapper[4809]: I0930 01:26:00.480604 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"19a1442afdfa18c0d27f18c243884121c78e39e1fdc5c50afe899f2c22a5eea6"} Sep 30 01:26:03 crc kubenswrapper[4809]: I0930 01:26:03.519921 4809 generic.go:334] "Generic (PLEG): container finished" podID="87824ee6-3fd5-4157-afdb-4e524127dae8" containerID="cef1876f2a75169a962c0882125ee94385310d1c9f97eb2372c3f3547cb44d28" exitCode=0 Sep 30 01:26:03 crc kubenswrapper[4809]: I0930 01:26:03.520004 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" event={"ID":"87824ee6-3fd5-4157-afdb-4e524127dae8","Type":"ContainerDied","Data":"cef1876f2a75169a962c0882125ee94385310d1c9f97eb2372c3f3547cb44d28"} Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.075595 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.170465 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fjzr\" (UniqueName: \"kubernetes.io/projected/87824ee6-3fd5-4157-afdb-4e524127dae8-kube-api-access-6fjzr\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.170574 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-inventory\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.170738 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceph\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.170836 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-2\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.170868 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-0\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.170918 4809 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-1\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.171045 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ssh-key\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.171102 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-telemetry-power-monitoring-combined-ca-bundle\") pod \"87824ee6-3fd5-4157-afdb-4e524127dae8\" (UID: \"87824ee6-3fd5-4157-afdb-4e524127dae8\") " Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.180886 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceph" (OuterVolumeSpecName: "ceph") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.186506 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-telemetry-power-monitoring-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-power-monitoring-combined-ca-bundle") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "telemetry-power-monitoring-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.186730 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87824ee6-3fd5-4157-afdb-4e524127dae8-kube-api-access-6fjzr" (OuterVolumeSpecName: "kube-api-access-6fjzr") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "kube-api-access-6fjzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.203839 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-0" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-0") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "ceilometer-ipmi-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.205083 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-inventory" (OuterVolumeSpecName: "inventory") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.220062 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.224881 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-2" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-2") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "ceilometer-ipmi-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.226295 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-1" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-1") pod "87824ee6-3fd5-4157-afdb-4e524127dae8" (UID: "87824ee6-3fd5-4157-afdb-4e524127dae8"). InnerVolumeSpecName "ceilometer-ipmi-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274318 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274384 4809 reconciler_common.go:293] "Volume detached for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-telemetry-power-monitoring-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274411 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fjzr\" (UniqueName: \"kubernetes.io/projected/87824ee6-3fd5-4157-afdb-4e524127dae8-kube-api-access-6fjzr\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274430 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274447 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274463 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.274481 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.275066 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: 
\"kubernetes.io/secret/87824ee6-3fd5-4157-afdb-4e524127dae8-ceilometer-ipmi-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.542075 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" event={"ID":"87824ee6-3fd5-4157-afdb-4e524127dae8","Type":"ContainerDied","Data":"b553b935f07137e9778d7d87e26d6b8454c9a2a5805eea149ddd35c59d04bfbe"} Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.542366 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b553b935f07137e9778d7d87e26d6b8454c9a2a5805eea149ddd35c59d04bfbe" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.542160 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.680832 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn"] Sep 30 01:26:05 crc kubenswrapper[4809]: E0930 01:26:05.682430 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87824ee6-3fd5-4157-afdb-4e524127dae8" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.682466 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="87824ee6-3fd5-4157-afdb-4e524127dae8" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.683422 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="87824ee6-3fd5-4157-afdb-4e524127dae8" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.686502 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.692585 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.694320 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.694404 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"logging-compute-config-data" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.709428 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8nvsp" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.716073 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.716268 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.729611 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn"] Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.814947 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chw5v\" (UniqueName: \"kubernetes.io/projected/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-kube-api-access-chw5v\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.815078 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.815108 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.815176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.815220 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ceph\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " 
pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.815275 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.917672 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chw5v\" (UniqueName: \"kubernetes.io/projected/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-kube-api-access-chw5v\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.917864 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.917908 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.917963 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.918032 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ceph\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.918089 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.924292 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 
30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.924317 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.924754 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ceph\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.924796 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.934393 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:05 crc kubenswrapper[4809]: I0930 01:26:05.936796 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chw5v\" (UniqueName: \"kubernetes.io/projected/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-kube-api-access-chw5v\") pod \"logging-edpm-deployment-openstack-edpm-ipam-fp9bn\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:06 crc kubenswrapper[4809]: I0930 01:26:06.035656 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:06 crc kubenswrapper[4809]: I0930 01:26:06.683213 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn"] Sep 30 01:26:06 crc kubenswrapper[4809]: I0930 01:26:06.696573 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:26:07 crc kubenswrapper[4809]: I0930 01:26:07.563524 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" event={"ID":"9ab038a4-a7ad-46da-8479-89b9aa5a30e8","Type":"ContainerStarted","Data":"f92e3e35003b3d30310cb3a1010ed110345d8b23274779101a7522e520fe6ce5"} Sep 30 01:26:07 crc kubenswrapper[4809]: I0930 01:26:07.564095 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" event={"ID":"9ab038a4-a7ad-46da-8479-89b9aa5a30e8","Type":"ContainerStarted","Data":"0263ebb4948ab629b075dbf389677deba55913e346e42bcfede325d26b8c964d"} Sep 30 01:26:07 crc kubenswrapper[4809]: I0930 01:26:07.598105 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" podStartSLOduration=2.127299009 podStartE2EDuration="2.598083417s" podCreationTimestamp="2025-09-30 01:26:05 +0000 UTC" firstStartedPulling="2025-09-30 01:26:06.696141111 +0000 UTC m=+4617.732390559" lastFinishedPulling="2025-09-30 01:26:07.166925519 +0000 UTC m=+4618.203174967" observedRunningTime="2025-09-30 01:26:07.587237988 +0000 UTC m=+4618.623487396" watchObservedRunningTime="2025-09-30 01:26:07.598083417 +0000 UTC m=+4618.634332835" Sep 30 01:26:23 crc kubenswrapper[4809]: I0930 01:26:23.773473 4809 generic.go:334] "Generic (PLEG): container finished" podID="9ab038a4-a7ad-46da-8479-89b9aa5a30e8" containerID="f92e3e35003b3d30310cb3a1010ed110345d8b23274779101a7522e520fe6ce5" exitCode=0 Sep 30 01:26:23 crc kubenswrapper[4809]: I0930 01:26:23.773614 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" event={"ID":"9ab038a4-a7ad-46da-8479-89b9aa5a30e8","Type":"ContainerDied","Data":"f92e3e35003b3d30310cb3a1010ed110345d8b23274779101a7522e520fe6ce5"} Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.323358 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.391413 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chw5v\" (UniqueName: \"kubernetes.io/projected/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-kube-api-access-chw5v\") pod \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.391535 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-inventory\") pod \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.391565 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-0\") pod \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.391611 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-1\") pod \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.399041 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-kube-api-access-chw5v" (OuterVolumeSpecName: "kube-api-access-chw5v") pod "9ab038a4-a7ad-46da-8479-89b9aa5a30e8" (UID: "9ab038a4-a7ad-46da-8479-89b9aa5a30e8"). InnerVolumeSpecName "kube-api-access-chw5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.493894 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ceph\") pod \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.493949 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ssh-key\") pod \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\" (UID: \"9ab038a4-a7ad-46da-8479-89b9aa5a30e8\") " Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.494478 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chw5v\" (UniqueName: \"kubernetes.io/projected/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-kube-api-access-chw5v\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.497937 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ceph" (OuterVolumeSpecName: "ceph") pod "9ab038a4-a7ad-46da-8479-89b9aa5a30e8" (UID: "9ab038a4-a7ad-46da-8479-89b9aa5a30e8"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.532019 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-1" (OuterVolumeSpecName: "logging-compute-config-data-1") pod "9ab038a4-a7ad-46da-8479-89b9aa5a30e8" (UID: "9ab038a4-a7ad-46da-8479-89b9aa5a30e8"). InnerVolumeSpecName "logging-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.542594 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9ab038a4-a7ad-46da-8479-89b9aa5a30e8" (UID: "9ab038a4-a7ad-46da-8479-89b9aa5a30e8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.545981 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-0" (OuterVolumeSpecName: "logging-compute-config-data-0") pod "9ab038a4-a7ad-46da-8479-89b9aa5a30e8" (UID: "9ab038a4-a7ad-46da-8479-89b9aa5a30e8"). InnerVolumeSpecName "logging-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.562064 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-inventory" (OuterVolumeSpecName: "inventory") pod "9ab038a4-a7ad-46da-8479-89b9aa5a30e8" (UID: "9ab038a4-a7ad-46da-8479-89b9aa5a30e8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.595450 4809 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.595489 4809 reconciler_common.go:293] "Volume detached for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.595499 4809 reconciler_common.go:293] "Volume detached for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-logging-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.595509 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.595519 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ab038a4-a7ad-46da-8479-89b9aa5a30e8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.804823 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" event={"ID":"9ab038a4-a7ad-46da-8479-89b9aa5a30e8","Type":"ContainerDied","Data":"0263ebb4948ab629b075dbf389677deba55913e346e42bcfede325d26b8c964d"} Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.805036 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0263ebb4948ab629b075dbf389677deba55913e346e42bcfede325d26b8c964d" Sep 30 01:26:25 crc kubenswrapper[4809]: I0930 01:26:25.804899 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-fp9bn" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.650475 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Sep 30 01:26:41 crc kubenswrapper[4809]: E0930 01:26:41.651498 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ab038a4-a7ad-46da-8479-89b9aa5a30e8" containerName="logging-edpm-deployment-openstack-edpm-ipam" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.651516 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ab038a4-a7ad-46da-8479-89b9aa5a30e8" containerName="logging-edpm-deployment-openstack-edpm-ipam" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.651781 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ab038a4-a7ad-46da-8479-89b9aa5a30e8" containerName="logging-edpm-deployment-openstack-edpm-ipam" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.653005 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.656401 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.659214 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.659931 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.661844 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.671223 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.684054 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.730666 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.842731 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843049 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843157 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843271 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-run\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843369 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-scripts\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843456 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6747706c-74d5-4d04-8d05-74868b8c1f28-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 
01:26:41.843529 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfx8p\" (UniqueName: \"kubernetes.io/projected/b4975857-a339-4008-b600-76960da1412a-kube-api-access-lfx8p\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843649 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-config-data\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843841 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843899 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-dev\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843923 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843954 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.843978 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844041 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844069 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844125 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844147 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844175 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844192 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844215 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52rgw\" (UniqueName: \"kubernetes.io/projected/6747706c-74d5-4d04-8d05-74868b8c1f28-kube-api-access-52rgw\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844241 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-sys\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844312 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844334 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-lib-modules\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844372 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-run\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844402 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: 
\"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-dev\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844423 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844456 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844482 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844525 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844605 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-sys\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844720 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.844846 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b4975857-a339-4008-b600-76960da1412a-ceph\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.945652 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946029 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-config-data\") pod \"cinder-volume-volume1-0\" (UID: 
\"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946083 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.945810 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946104 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946166 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946315 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946366 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52rgw\" (UniqueName: \"kubernetes.io/projected/6747706c-74d5-4d04-8d05-74868b8c1f28-kube-api-access-52rgw\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946401 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-sys\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946460 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-sys\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946391 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946558 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946582 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-lib-modules\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946606 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-run\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946632 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-dev\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946668 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946687 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946703 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-run\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946707 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946765 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-dev\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946780 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946868 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946706 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-lib-modules\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946931 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-sys\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.946952 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-sys\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947029 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947080 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b4975857-a339-4008-b600-76960da1412a-ceph\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947116 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947130 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947231 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947269 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947355 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-run\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947391 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-scripts\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947421 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6747706c-74d5-4d04-8d05-74868b8c1f28-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947448 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfx8p\" (UniqueName: \"kubernetes.io/projected/b4975857-a339-4008-b600-76960da1412a-kube-api-access-lfx8p\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947481 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-config-data\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947522 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947551 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-dev\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947582 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947602 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc 
kubenswrapper[4809]: I0930 01:26:41.947635 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947761 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.947863 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.948007 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.948143 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.948163 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-dev\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.948182 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-run\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.948212 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.948245 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.949400 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6747706c-74d5-4d04-8d05-74868b8c1f28-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " 
pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.949571 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/b4975857-a339-4008-b600-76960da1412a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.952422 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.952612 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.952808 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b4975857-a339-4008-b600-76960da1412a-ceph\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.953399 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.954679 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.955193 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.957820 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-config-data\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.961137 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6747706c-74d5-4d04-8d05-74868b8c1f28-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.965106 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4975857-a339-4008-b600-76960da1412a-scripts\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " 
pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.967308 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52rgw\" (UniqueName: \"kubernetes.io/projected/6747706c-74d5-4d04-8d05-74868b8c1f28-kube-api-access-52rgw\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.967533 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6747706c-74d5-4d04-8d05-74868b8c1f28-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"6747706c-74d5-4d04-8d05-74868b8c1f28\") " pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.969104 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfx8p\" (UniqueName: \"kubernetes.io/projected/b4975857-a339-4008-b600-76960da1412a-kube-api-access-lfx8p\") pod \"cinder-backup-0\" (UID: \"b4975857-a339-4008-b600-76960da1412a\") " pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.978367 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 01:26:41 crc kubenswrapper[4809]: I0930 01:26:41.989554 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.436498 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-4hp49"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.438324 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4hp49" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.457130 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-4hp49"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.528319 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.530526 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.534110 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.535224 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dxw52" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.535401 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.535538 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.551139 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.559421 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2rnh\" (UniqueName: \"kubernetes.io/projected/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf-kube-api-access-f2rnh\") pod \"manila-db-create-4hp49\" (UID: \"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf\") " pod="openstack/manila-db-create-4hp49" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.567859 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.572712 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.578801 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.578969 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.586241 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.643495 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b6f577d77-fcqjv"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.646031 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.651615 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.651799 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.651836 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.652125 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-2nthd" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664218 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-config-data\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664248 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg92x\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-kube-api-access-bg92x\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664429 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664490 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664583 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-ceph\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664723 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664762 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2rnh\" (UniqueName: \"kubernetes.io/projected/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf-kube-api-access-f2rnh\") pod \"manila-db-create-4hp49\" (UID: \"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf\") " pod="openstack/manila-db-create-4hp49" Sep 30 01:26:42 crc 
kubenswrapper[4809]: I0930 01:26:42.664834 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664868 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-scripts\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.664959 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-logs\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.704315 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: E0930 01:26:42.705369 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceph combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-wcsmq logs scripts], unattached volumes=[], failed to process volumes=[ceph combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-wcsmq logs scripts]: context canceled" pod="openstack/glance-default-internal-api-0" podUID="67357d2c-c046-4ca3-95ea-fc92bec5e91c" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.736022 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b6f577d77-fcqjv"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.765786 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2rnh\" (UniqueName: \"kubernetes.io/projected/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf-kube-api-access-f2rnh\") pod \"manila-db-create-4hp49\" (UID: \"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf\") " pod="openstack/manila-db-create-4hp49" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767316 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767379 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-scripts\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767446 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " 
pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767508 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-config-data\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767541 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg92x\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-kube-api-access-bg92x\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767593 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767626 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fc4t\" (UniqueName: \"kubernetes.io/projected/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-kube-api-access-5fc4t\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767679 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767716 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767788 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-ceph\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767828 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767855 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " 
pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767915 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767949 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.767987 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-scripts\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.768012 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-logs\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.768038 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-horizon-secret-key\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.768068 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-logs\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.768101 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-logs\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.768107 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.768123 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcsmq\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-kube-api-access-wcsmq\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " 
pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.769561 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-logs\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.780259 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.780571 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.791314 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4hp49" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.791777 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.791808 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-config-data\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.793158 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.806328 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg92x\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-kube-api-access-bg92x\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.820959 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:42 crc kubenswrapper[4809]: E0930 01:26:42.822052 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceph combined-ca-bundle config-data glance public-tls-certs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-external-api-0" podUID="cf5095a1-3b66-49aa-9736-8d47478ada7a" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.830325 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-scripts\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " 
pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.835169 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-config-data\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.847357 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-ceph\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.849195 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.850012 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.890910 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-55dcbdc977-npgqs"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.894049 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.896179 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.899828 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900015 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-logs\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900100 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-horizon-secret-key\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900198 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-logs\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900279 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcsmq\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-kube-api-access-wcsmq\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900376 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900451 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.900725 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-config-data\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.901202 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.901426 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-scripts\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.901953 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.902184 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fc4t\" (UniqueName: \"kubernetes.io/projected/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-kube-api-access-5fc4t\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.902873 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.908648 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.909341 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-logs\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.914385 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.914943 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-scripts\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.915258 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-logs\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.916918 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-config-data\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.945619 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.947144 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55dcbdc977-npgqs"] Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.949173 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.951032 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.951662 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-horizon-secret-key\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.952286 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.952503 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.953264 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.964199 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fc4t\" (UniqueName: 
\"kubernetes.io/projected/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-kube-api-access-5fc4t\") pod \"horizon-7b6f577d77-fcqjv\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.966802 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcsmq\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-kube-api-access-wcsmq\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:42 crc kubenswrapper[4809]: I0930 01:26:42.971465 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.008075 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-scripts\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.008210 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c24c30-a3af-4fb9-907b-6240a47687b0-horizon-secret-key\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.008270 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbsfr\" (UniqueName: \"kubernetes.io/projected/56c24c30-a3af-4fb9-907b-6240a47687b0-kube-api-access-cbsfr\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.008356 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c24c30-a3af-4fb9-907b-6240a47687b0-logs\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.008390 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-config-data\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.025009 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.025400 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"b4975857-a339-4008-b600-76960da1412a","Type":"ContainerStarted","Data":"c83f3a7c1624fb0d4533f4d3d84b44d0cd5bbee1c9e893cbf6ea89533b10de85"} Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.025442 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.077592 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.110118 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbsfr\" (UniqueName: \"kubernetes.io/projected/56c24c30-a3af-4fb9-907b-6240a47687b0-kube-api-access-cbsfr\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.110240 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c24c30-a3af-4fb9-907b-6240a47687b0-logs\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.110288 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-config-data\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.111580 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c24c30-a3af-4fb9-907b-6240a47687b0-logs\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.115069 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-scripts\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.115280 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c24c30-a3af-4fb9-907b-6240a47687b0-horizon-secret-key\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.121837 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c24c30-a3af-4fb9-907b-6240a47687b0-horizon-secret-key\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.126158 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-config-data\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.128168 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-scripts\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.138917 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbsfr\" (UniqueName: \"kubernetes.io/projected/56c24c30-a3af-4fb9-907b-6240a47687b0-kube-api-access-cbsfr\") pod \"horizon-55dcbdc977-npgqs\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.209053 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.241342 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.253166 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436684 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-scripts\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436734 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-logs\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436753 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-combined-ca-bundle\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436786 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436812 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-combined-ca-bundle\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436826 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-scripts\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436845 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-config-data\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 
01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436862 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-public-tls-certs\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436882 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436916 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-config-data\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.436979 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg92x\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-kube-api-access-bg92x\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437079 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-logs\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437152 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-internal-tls-certs\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437198 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-httpd-run\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437243 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-ceph\") pod \"cf5095a1-3b66-49aa-9736-8d47478ada7a\" (UID: \"cf5095a1-3b66-49aa-9736-8d47478ada7a\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437268 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-ceph\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437296 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-httpd-run\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.437316 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-wcsmq\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-kube-api-access-wcsmq\") pod \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\" (UID: \"67357d2c-c046-4ca3-95ea-fc92bec5e91c\") " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.440550 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.442406 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-logs" (OuterVolumeSpecName: "logs") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.444420 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-logs" (OuterVolumeSpecName: "logs") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.445993 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.446162 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-scripts" (OuterVolumeSpecName: "scripts") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.446306 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-kube-api-access-wcsmq" (OuterVolumeSpecName: "kube-api-access-wcsmq") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "kube-api-access-wcsmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.448412 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-scripts" (OuterVolumeSpecName: "scripts") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.448547 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.448709 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-config-data" (OuterVolumeSpecName: "config-data") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.448427 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.450412 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.452923 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.452987 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-config-data" (OuterVolumeSpecName: "config-data") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.452973 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-ceph" (OuterVolumeSpecName: "ceph") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.453012 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-ceph" (OuterVolumeSpecName: "ceph") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.453052 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-kube-api-access-bg92x" (OuterVolumeSpecName: "kube-api-access-bg92x") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "kube-api-access-bg92x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.453111 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67357d2c-c046-4ca3-95ea-fc92bec5e91c" (UID: "67357d2c-c046-4ca3-95ea-fc92bec5e91c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.455206 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cf5095a1-3b66-49aa-9736-8d47478ada7a" (UID: "cf5095a1-3b66-49aa-9736-8d47478ada7a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.535895 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-4hp49"] Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545348 4809 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545383 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545395 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545408 4809 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545420 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcsmq\" (UniqueName: \"kubernetes.io/projected/67357d2c-c046-4ca3-95ea-fc92bec5e91c-kube-api-access-wcsmq\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545433 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545444 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545455 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/67357d2c-c046-4ca3-95ea-fc92bec5e91c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545496 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545509 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545520 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545531 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545545 4809 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545564 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545576 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5095a1-3b66-49aa-9736-8d47478ada7a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545587 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg92x\" (UniqueName: \"kubernetes.io/projected/cf5095a1-3b66-49aa-9736-8d47478ada7a-kube-api-access-bg92x\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545597 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5095a1-3b66-49aa-9736-8d47478ada7a-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.545608 4809 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67357d2c-c046-4ca3-95ea-fc92bec5e91c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.584751 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.616972 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.647826 4809 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.647849 4809 reconciler_common.go:293] "Volume detached for volume 
\"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.648034 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.657495 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b6f577d77-fcqjv"] Sep 30 01:26:43 crc kubenswrapper[4809]: I0930 01:26:43.755220 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55dcbdc977-npgqs"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.042472 4809 generic.go:334] "Generic (PLEG): container finished" podID="b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf" containerID="1b787b78deaa0276ea8e36737f24a128810e2a74ed35cb6bf74305e0964767ee" exitCode=0 Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.042521 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-4hp49" event={"ID":"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf","Type":"ContainerDied","Data":"1b787b78deaa0276ea8e36737f24a128810e2a74ed35cb6bf74305e0964767ee"} Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.043019 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-4hp49" event={"ID":"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf","Type":"ContainerStarted","Data":"4d5677be754e0586544293202410ba587781c59f184312769619da06b55a58cd"} Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.044585 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"6747706c-74d5-4d04-8d05-74868b8c1f28","Type":"ContainerStarted","Data":"83d59f056b22ecf21d884936839dcda47fd87c4f29fd3bc9f1f3e0d5f7a5b992"} Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.046169 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55dcbdc977-npgqs" event={"ID":"56c24c30-a3af-4fb9-907b-6240a47687b0","Type":"ContainerStarted","Data":"ef14dc68c6b36c654e90d790610cf0ad9aaae8309772eb8735eda1869944ec2f"} Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.052716 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"b4975857-a339-4008-b600-76960da1412a","Type":"ContainerStarted","Data":"40aab0a89bbe6cc281c4ad05994f31a466d20504f0a7417c15562bb30e5855e3"} Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.058351 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.058963 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6f577d77-fcqjv" event={"ID":"c1b7a534-1f99-4f28-af81-d31c55d4e7f0","Type":"ContainerStarted","Data":"14e2d9a83169b1f2799905e66d8b4ec719b5f0e8e7cfcc4f47f5b57992c3b948"} Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.059016 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.245137 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.271533 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.315544 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.317748 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.321189 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.321364 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.324525 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dxw52" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.324665 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.342950 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.367128 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.391937 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.415770 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.418654 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.424058 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.424638 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.426164 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.466908 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467014 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-ceph\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467047 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467095 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-scripts\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467137 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-logs\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467156 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk5ls\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-kube-api-access-gk5ls\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467170 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-config-data\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467228 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.467273 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569538 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-ceph\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569612 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569668 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-logs\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569698 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569793 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569837 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-scripts\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569934 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-logs\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.569984 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk5ls\" 
(UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-kube-api-access-gk5ls\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570073 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-config-data\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570220 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570240 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570260 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570351 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570427 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-ceph\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570492 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570847 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.570874 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w9zw\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-kube-api-access-8w9zw\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.571320 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.571455 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-logs\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.571661 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.579354 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.579532 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-ceph\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.580624 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-config-data\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.580845 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.589934 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.594008 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk5ls\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-kube-api-access-gk5ls\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.628799 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.651357 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.672497 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.672743 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-logs\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.672702 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.672991 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.673088 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.673114 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.673150 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.673168 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-ceph\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.673252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.673285 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w9zw\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-kube-api-access-8w9zw\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.674151 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-logs\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.675587 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.679165 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.699273 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.701933 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.703868 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.710908 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-ceph\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.713951 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w9zw\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-kube-api-access-8w9zw\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:44 crc kubenswrapper[4809]: I0930 01:26:44.802113 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.040534 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.078861 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"b4975857-a339-4008-b600-76960da1412a","Type":"ContainerStarted","Data":"70bbe7762cb324f23e5f0c0e997b0c6f00dc6aee30610601487936fbdf970162"} Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.083794 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"6747706c-74d5-4d04-8d05-74868b8c1f28","Type":"ContainerStarted","Data":"453828793b02a6469d10870f4936119c56e461bab6f9bc93725b08d20c652632"} Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.113934 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.168209034 podStartE2EDuration="4.113916744s" podCreationTimestamp="2025-09-30 01:26:41 +0000 UTC" firstStartedPulling="2025-09-30 01:26:42.761053673 +0000 UTC m=+4653.797303081" lastFinishedPulling="2025-09-30 01:26:43.706761383 +0000 UTC m=+4654.743010791" observedRunningTime="2025-09-30 01:26:45.103205769 +0000 UTC m=+4656.139455187" watchObservedRunningTime="2025-09-30 01:26:45.113916744 +0000 UTC m=+4656.150166152" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.326159 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55dcbdc977-npgqs"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.375618 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64c7465c7b-5tvxt"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.377742 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.383117 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.402980 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.433754 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64c7465c7b-5tvxt"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.520098 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-config-data\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.521204 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-secret-key\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.521246 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-combined-ca-bundle\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.521299 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-scripts\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.521366 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-logs\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.521446 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-tls-certs\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.521546 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw6dx\" (UniqueName: \"kubernetes.io/projected/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-kube-api-access-zw6dx\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.560719 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 
01:26:45.603754 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630038 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-combined-ca-bundle\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630104 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-scripts\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630187 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-logs\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630237 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-tls-certs\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630343 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw6dx\" (UniqueName: \"kubernetes.io/projected/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-kube-api-access-zw6dx\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630550 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-config-data\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.630603 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-secret-key\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.645491 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-logs\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.648049 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-scripts\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.649697 4809 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-config-data\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.651742 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-tls-certs\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.652254 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-secret-key\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.662247 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-combined-ca-bundle\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.663959 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b6f577d77-fcqjv"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.683823 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5b44f7d95d-84wt2"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.685751 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b44f7d95d-84wt2"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.685867 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.693096 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw6dx\" (UniqueName: \"kubernetes.io/projected/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-kube-api-access-zw6dx\") pod \"horizon-64c7465c7b-5tvxt\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.747555 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67357d2c-c046-4ca3-95ea-fc92bec5e91c" path="/var/lib/kubelet/pods/67357d2c-c046-4ca3-95ea-fc92bec5e91c/volumes" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.748257 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf5095a1-3b66-49aa-9736-8d47478ada7a" path="/var/lib/kubelet/pods/cf5095a1-3b66-49aa-9736-8d47478ada7a/volumes" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.823395 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837121 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-horizon-tls-certs\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837171 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/818908b2-bd0c-49f5-a239-06ae4b04f236-config-data\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837288 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-combined-ca-bundle\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837360 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/818908b2-bd0c-49f5-a239-06ae4b04f236-scripts\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837397 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-horizon-secret-key\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837433 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/818908b2-bd0c-49f5-a239-06ae4b04f236-logs\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.837470 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w5fx\" (UniqueName: \"kubernetes.io/projected/818908b2-bd0c-49f5-a239-06ae4b04f236-kube-api-access-9w5fx\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.912961 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939703 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-combined-ca-bundle\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939778 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/818908b2-bd0c-49f5-a239-06ae4b04f236-scripts\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939823 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-horizon-secret-key\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939866 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/818908b2-bd0c-49f5-a239-06ae4b04f236-logs\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939897 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w5fx\" (UniqueName: \"kubernetes.io/projected/818908b2-bd0c-49f5-a239-06ae4b04f236-kube-api-access-9w5fx\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939934 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-horizon-tls-certs\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.939951 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/818908b2-bd0c-49f5-a239-06ae4b04f236-config-data\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.940389 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/818908b2-bd0c-49f5-a239-06ae4b04f236-logs\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.941221 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/818908b2-bd0c-49f5-a239-06ae4b04f236-scripts\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:45 crc kubenswrapper[4809]: I0930 01:26:45.942930 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/818908b2-bd0c-49f5-a239-06ae4b04f236-config-data\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.105464 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"16e7cb52-e504-487a-bf82-ca5aff7e7149","Type":"ContainerStarted","Data":"5d48cb14fd158a18c3048affd3ae1f1b74534b557331dd57baf101429b5a0304"} Sep 30 01:26:46 crc 
kubenswrapper[4809]: I0930 01:26:46.407906 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-horizon-tls-certs\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.408100 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-combined-ca-bundle\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.411843 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/818908b2-bd0c-49f5-a239-06ae4b04f236-horizon-secret-key\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.412338 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w5fx\" (UniqueName: \"kubernetes.io/projected/818908b2-bd0c-49f5-a239-06ae4b04f236-kube-api-access-9w5fx\") pod \"horizon-5b44f7d95d-84wt2\" (UID: \"818908b2-bd0c-49f5-a239-06ae4b04f236\") " pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:46 crc kubenswrapper[4809]: W0930 01:26:46.529046 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79549f56_3ac0_4bea_898b_c9b8c4d4eebb.slice/crio-d59713d365f435b3ecc80c1671a76dc4ae310b063c00de9848dfddc7a56bd5dd WatchSource:0}: Error finding container d59713d365f435b3ecc80c1671a76dc4ae310b063c00de9848dfddc7a56bd5dd: Status 404 returned error can't find the container with id d59713d365f435b3ecc80c1671a76dc4ae310b063c00de9848dfddc7a56bd5dd Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.629203 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.795152 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4hp49" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.979087 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Sep 30 01:26:46 crc kubenswrapper[4809]: I0930 01:26:46.980438 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2rnh\" (UniqueName: \"kubernetes.io/projected/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf-kube-api-access-f2rnh\") pod \"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf\" (UID: \"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf\") " Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.004703 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf-kube-api-access-f2rnh" (OuterVolumeSpecName: "kube-api-access-f2rnh") pod "b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf" (UID: "b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf"). InnerVolumeSpecName "kube-api-access-f2rnh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.084263 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2rnh\" (UniqueName: \"kubernetes.io/projected/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf-kube-api-access-f2rnh\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.138585 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64c7465c7b-5tvxt"] Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.143198 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"6747706c-74d5-4d04-8d05-74868b8c1f28","Type":"ContainerStarted","Data":"58e1e5ed66b336a6ab7c7f6c3ca302059ff3d8aa4030fbd191ff1369c8afc4a2"} Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.145332 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"79549f56-3ac0-4bea-898b-c9b8c4d4eebb","Type":"ContainerStarted","Data":"d59713d365f435b3ecc80c1671a76dc4ae310b063c00de9848dfddc7a56bd5dd"} Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.149996 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-4hp49" event={"ID":"b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf","Type":"ContainerDied","Data":"4d5677be754e0586544293202410ba587781c59f184312769619da06b55a58cd"} Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.150031 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4hp49" Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.150037 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d5677be754e0586544293202410ba587781c59f184312769619da06b55a58cd" Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.169080 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=5.130118592 podStartE2EDuration="6.169067356s" podCreationTimestamp="2025-09-30 01:26:41 +0000 UTC" firstStartedPulling="2025-09-30 01:26:43.669705374 +0000 UTC m=+4654.705954782" lastFinishedPulling="2025-09-30 01:26:44.708654138 +0000 UTC m=+4655.744903546" observedRunningTime="2025-09-30 01:26:47.162919998 +0000 UTC m=+4658.199169406" watchObservedRunningTime="2025-09-30 01:26:47.169067356 +0000 UTC m=+4658.205316764" Sep 30 01:26:47 crc kubenswrapper[4809]: I0930 01:26:47.304991 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b44f7d95d-84wt2"] Sep 30 01:26:47 crc kubenswrapper[4809]: W0930 01:26:47.372590 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod818908b2_bd0c_49f5_a239_06ae4b04f236.slice/crio-e361d39366771750e288f0a08a02237658f33cd93bb946e2a272f570363a1316 WatchSource:0}: Error finding container e361d39366771750e288f0a08a02237658f33cd93bb946e2a272f570363a1316: Status 404 returned error can't find the container with id e361d39366771750e288f0a08a02237658f33cd93bb946e2a272f570363a1316 Sep 30 01:26:48 crc kubenswrapper[4809]: I0930 01:26:48.252923 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b44f7d95d-84wt2" event={"ID":"818908b2-bd0c-49f5-a239-06ae4b04f236","Type":"ContainerStarted","Data":"e361d39366771750e288f0a08a02237658f33cd93bb946e2a272f570363a1316"} Sep 30 01:26:48 crc kubenswrapper[4809]: I0930 01:26:48.300913 4809 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c7465c7b-5tvxt" event={"ID":"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6","Type":"ContainerStarted","Data":"818e18ea01df8668112cb6d235b8151cfdd84d32e68299cb4cbbcbf0633a0061"} Sep 30 01:26:48 crc kubenswrapper[4809]: I0930 01:26:48.342540 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"16e7cb52-e504-487a-bf82-ca5aff7e7149","Type":"ContainerStarted","Data":"ecaec00fe909416f8d561d2c3cc811a85d0c0a72d8b082c48beddf2bc0e325a6"} Sep 30 01:26:48 crc kubenswrapper[4809]: I0930 01:26:48.367599 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"79549f56-3ac0-4bea-898b-c9b8c4d4eebb","Type":"ContainerStarted","Data":"713e761c25095e7582d0f3cfe5fd54da6e3eade8a7574c9b097e18c2ddad4895"} Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.387869 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"16e7cb52-e504-487a-bf82-ca5aff7e7149","Type":"ContainerStarted","Data":"9dbe827e541e32407fd98b0a750d1eb654d3403e3d03e6a561f116e0a244d1a1"} Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.388779 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-log" containerID="cri-o://ecaec00fe909416f8d561d2c3cc811a85d0c0a72d8b082c48beddf2bc0e325a6" gracePeriod=30 Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.390493 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-httpd" containerID="cri-o://9dbe827e541e32407fd98b0a750d1eb654d3403e3d03e6a561f116e0a244d1a1" gracePeriod=30 Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.398029 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"79549f56-3ac0-4bea-898b-c9b8c4d4eebb","Type":"ContainerStarted","Data":"23d5fea68c462601d59930784941c8ff8481cb101aaeef77e23c8ffda4f6b8a2"} Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.398210 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-log" containerID="cri-o://713e761c25095e7582d0f3cfe5fd54da6e3eade8a7574c9b097e18c2ddad4895" gracePeriod=30 Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.398452 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-httpd" containerID="cri-o://23d5fea68c462601d59930784941c8ff8481cb101aaeef77e23c8ffda4f6b8a2" gracePeriod=30 Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 01:26:49.421358 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.421338951 podStartE2EDuration="5.421338951s" podCreationTimestamp="2025-09-30 01:26:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:26:49.411283795 +0000 UTC m=+4660.447533213" watchObservedRunningTime="2025-09-30 01:26:49.421338951 +0000 UTC m=+4660.457588359" Sep 30 01:26:49 crc kubenswrapper[4809]: I0930 
01:26:49.455530 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.45550627 podStartE2EDuration="5.45550627s" podCreationTimestamp="2025-09-30 01:26:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:26:49.444011874 +0000 UTC m=+4660.480261312" watchObservedRunningTime="2025-09-30 01:26:49.45550627 +0000 UTC m=+4660.491755678" Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.427234 4809 generic.go:334] "Generic (PLEG): container finished" podID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerID="23d5fea68c462601d59930784941c8ff8481cb101aaeef77e23c8ffda4f6b8a2" exitCode=0 Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.427805 4809 generic.go:334] "Generic (PLEG): container finished" podID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerID="713e761c25095e7582d0f3cfe5fd54da6e3eade8a7574c9b097e18c2ddad4895" exitCode=143 Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.427320 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"79549f56-3ac0-4bea-898b-c9b8c4d4eebb","Type":"ContainerDied","Data":"23d5fea68c462601d59930784941c8ff8481cb101aaeef77e23c8ffda4f6b8a2"} Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.427875 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"79549f56-3ac0-4bea-898b-c9b8c4d4eebb","Type":"ContainerDied","Data":"713e761c25095e7582d0f3cfe5fd54da6e3eade8a7574c9b097e18c2ddad4895"} Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.431177 4809 generic.go:334] "Generic (PLEG): container finished" podID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerID="9dbe827e541e32407fd98b0a750d1eb654d3403e3d03e6a561f116e0a244d1a1" exitCode=0 Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.431204 4809 generic.go:334] "Generic (PLEG): container finished" podID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerID="ecaec00fe909416f8d561d2c3cc811a85d0c0a72d8b082c48beddf2bc0e325a6" exitCode=143 Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.431234 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"16e7cb52-e504-487a-bf82-ca5aff7e7149","Type":"ContainerDied","Data":"9dbe827e541e32407fd98b0a750d1eb654d3403e3d03e6a561f116e0a244d1a1"} Sep 30 01:26:50 crc kubenswrapper[4809]: I0930 01:26:50.431277 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"16e7cb52-e504-487a-bf82-ca5aff7e7149","Type":"ContainerDied","Data":"ecaec00fe909416f8d561d2c3cc811a85d0c0a72d8b082c48beddf2bc0e325a6"} Sep 30 01:26:51 crc kubenswrapper[4809]: I0930 01:26:51.990613 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:52 crc kubenswrapper[4809]: I0930 01:26:52.220100 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Sep 30 01:26:52 crc kubenswrapper[4809]: I0930 01:26:52.252251 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.892292 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.947081 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.964692 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-combined-ca-bundle\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.964809 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-ceph\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.964908 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.965033 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-httpd-run\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.965064 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-scripts\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.965125 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-logs\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.965165 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-config-data\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.965300 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-public-tls-certs\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.965334 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk5ls\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-kube-api-access-gk5ls\") pod \"16e7cb52-e504-487a-bf82-ca5aff7e7149\" (UID: \"16e7cb52-e504-487a-bf82-ca5aff7e7149\") " Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.966527 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-logs" (OuterVolumeSpecName: "logs") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.966866 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.966979 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.972834 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-ceph" (OuterVolumeSpecName: "ceph") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.976437 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.978603 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-scripts" (OuterVolumeSpecName: "scripts") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:54 crc kubenswrapper[4809]: I0930 01:26:54.978815 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-kube-api-access-gk5ls" (OuterVolumeSpecName: "kube-api-access-gk5ls") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "kube-api-access-gk5ls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.012986 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.054686 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-config-data" (OuterVolumeSpecName: "config-data") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.069671 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-httpd-run\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.069745 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-scripts\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.069777 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w9zw\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-kube-api-access-8w9zw\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.069797 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-internal-tls-certs\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.069943 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-logs\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.069973 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-ceph\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070074 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070096 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-combined-ca-bundle\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070167 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-config-data\") pod \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\" (UID: \"79549f56-3ac0-4bea-898b-c9b8c4d4eebb\") " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070715 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070756 4809 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/16e7cb52-e504-487a-bf82-ca5aff7e7149-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070767 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070775 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070784 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk5ls\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-kube-api-access-gk5ls\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070795 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.070805 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/16e7cb52-e504-487a-bf82-ca5aff7e7149-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.071493 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.071662 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-logs" (OuterVolumeSpecName: "logs") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.082222 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-ceph" (OuterVolumeSpecName: "ceph") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.082907 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-scripts" (OuterVolumeSpecName: "scripts") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.083747 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.088713 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-kube-api-access-8w9zw" (OuterVolumeSpecName: "kube-api-access-8w9zw") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "kube-api-access-8w9zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.095709 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "16e7cb52-e504-487a-bf82-ca5aff7e7149" (UID: "16e7cb52-e504-487a-bf82-ca5aff7e7149"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.113397 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.126213 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175476 4809 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175522 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175535 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w9zw\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-kube-api-access-8w9zw\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175548 4809 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16e7cb52-e504-487a-bf82-ca5aff7e7149-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175559 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175569 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175596 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175607 4809 reconciler_common.go:293] "Volume 
detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.175620 4809 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.205825 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.206671 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.212242 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-config-data" (OuterVolumeSpecName: "config-data") pod "79549f56-3ac0-4bea-898b-c9b8c4d4eebb" (UID: "79549f56-3ac0-4bea-898b-c9b8c4d4eebb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.278085 4809 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.278138 4809 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.278159 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79549f56-3ac0-4bea-898b-c9b8c4d4eebb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.487261 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55dcbdc977-npgqs" event={"ID":"56c24c30-a3af-4fb9-907b-6240a47687b0","Type":"ContainerStarted","Data":"d49eb8c99d66bc7ea646ab25dd1b69f824178c0b98a1222a7bbb65c610e5f6b5"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.487317 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55dcbdc977-npgqs" event={"ID":"56c24c30-a3af-4fb9-907b-6240a47687b0","Type":"ContainerStarted","Data":"ad16d68eebb2511cfd738bbbe5b63b8a16474e617e996c6022c048dd43fe3fa6"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.487345 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55dcbdc977-npgqs" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon-log" containerID="cri-o://ad16d68eebb2511cfd738bbbe5b63b8a16474e617e996c6022c048dd43fe3fa6" gracePeriod=30 Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.487438 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55dcbdc977-npgqs" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon" 
containerID="cri-o://d49eb8c99d66bc7ea646ab25dd1b69f824178c0b98a1222a7bbb65c610e5f6b5" gracePeriod=30 Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.489966 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"79549f56-3ac0-4bea-898b-c9b8c4d4eebb","Type":"ContainerDied","Data":"d59713d365f435b3ecc80c1671a76dc4ae310b063c00de9848dfddc7a56bd5dd"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.489999 4809 scope.go:117] "RemoveContainer" containerID="23d5fea68c462601d59930784941c8ff8481cb101aaeef77e23c8ffda4f6b8a2" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.490100 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.498296 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b44f7d95d-84wt2" event={"ID":"818908b2-bd0c-49f5-a239-06ae4b04f236","Type":"ContainerStarted","Data":"9c6ba174222dc94af93a4a8610cea7e28557ff3799aafa329cf046da764333e1"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.498347 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b44f7d95d-84wt2" event={"ID":"818908b2-bd0c-49f5-a239-06ae4b04f236","Type":"ContainerStarted","Data":"ed2346d75d3d10ff9d2bea9c95044ec259548ec523954e891b112ad41373dafe"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.507834 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c7465c7b-5tvxt" event={"ID":"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6","Type":"ContainerStarted","Data":"24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.507895 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c7465c7b-5tvxt" event={"ID":"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6","Type":"ContainerStarted","Data":"05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.510950 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-55dcbdc977-npgqs" podStartSLOduration=2.828763921 podStartE2EDuration="13.510932703s" podCreationTimestamp="2025-09-30 01:26:42 +0000 UTC" firstStartedPulling="2025-09-30 01:26:43.767332169 +0000 UTC m=+4654.803581577" lastFinishedPulling="2025-09-30 01:26:54.449500951 +0000 UTC m=+4665.485750359" observedRunningTime="2025-09-30 01:26:55.507679414 +0000 UTC m=+4666.543928812" watchObservedRunningTime="2025-09-30 01:26:55.510932703 +0000 UTC m=+4666.547182111" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.517241 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.517874 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"16e7cb52-e504-487a-bf82-ca5aff7e7149","Type":"ContainerDied","Data":"5d48cb14fd158a18c3048affd3ae1f1b74534b557331dd57baf101429b5a0304"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.522493 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6f577d77-fcqjv" event={"ID":"c1b7a534-1f99-4f28-af81-d31c55d4e7f0","Type":"ContainerStarted","Data":"18d30499cc9527b4586f9a4080c5c8d84f78f7ad86185c3c3415614ce52c2830"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.522543 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6f577d77-fcqjv" event={"ID":"c1b7a534-1f99-4f28-af81-d31c55d4e7f0","Type":"ContainerStarted","Data":"93364597e6dce5c641c152f5fcc304bc2d3a7c14beac710910b3167b81297d62"} Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.522680 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b6f577d77-fcqjv" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon-log" containerID="cri-o://93364597e6dce5c641c152f5fcc304bc2d3a7c14beac710910b3167b81297d62" gracePeriod=30 Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.522855 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b6f577d77-fcqjv" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon" containerID="cri-o://18d30499cc9527b4586f9a4080c5c8d84f78f7ad86185c3c3415614ce52c2830" gracePeriod=30 Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.526120 4809 scope.go:117] "RemoveContainer" containerID="713e761c25095e7582d0f3cfe5fd54da6e3eade8a7574c9b097e18c2ddad4895" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.533321 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5b44f7d95d-84wt2" podStartSLOduration=3.459363013 podStartE2EDuration="10.533293648s" podCreationTimestamp="2025-09-30 01:26:45 +0000 UTC" firstStartedPulling="2025-09-30 01:26:47.375899675 +0000 UTC m=+4658.412149083" lastFinishedPulling="2025-09-30 01:26:54.4498303 +0000 UTC m=+4665.486079718" observedRunningTime="2025-09-30 01:26:55.529462853 +0000 UTC m=+4666.565712261" watchObservedRunningTime="2025-09-30 01:26:55.533293648 +0000 UTC m=+4666.569543056" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.558458 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.562336 4809 scope.go:117] "RemoveContainer" containerID="9dbe827e541e32407fd98b0a750d1eb654d3403e3d03e6a561f116e0a244d1a1" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.573714 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.589474 4809 scope.go:117] "RemoveContainer" containerID="ecaec00fe909416f8d561d2c3cc811a85d0c0a72d8b082c48beddf2bc0e325a6" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.593754 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: E0930 01:26:55.594467 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" 
containerName="glance-httpd" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.594567 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-httpd" Sep 30 01:26:55 crc kubenswrapper[4809]: E0930 01:26:55.594676 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-log" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.594767 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-log" Sep 30 01:26:55 crc kubenswrapper[4809]: E0930 01:26:55.594866 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-httpd" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.594956 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-httpd" Sep 30 01:26:55 crc kubenswrapper[4809]: E0930 01:26:55.595091 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-log" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.595176 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-log" Sep 30 01:26:55 crc kubenswrapper[4809]: E0930 01:26:55.595266 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf" containerName="mariadb-database-create" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.595344 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf" containerName="mariadb-database-create" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.595704 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf" containerName="mariadb-database-create" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.595827 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-httpd" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.595908 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-httpd" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.595994 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" containerName="glance-log" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.596064 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" containerName="glance-log" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.597516 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.604961 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.605194 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dxw52" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.605352 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.605929 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.634233 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-64c7465c7b-5tvxt" podStartSLOduration=3.380537056 podStartE2EDuration="10.634214674s" podCreationTimestamp="2025-09-30 01:26:45 +0000 UTC" firstStartedPulling="2025-09-30 01:26:47.199515204 +0000 UTC m=+4658.235764612" lastFinishedPulling="2025-09-30 01:26:54.453192822 +0000 UTC m=+4665.489442230" observedRunningTime="2025-09-30 01:26:55.5831741 +0000 UTC m=+4666.619423528" watchObservedRunningTime="2025-09-30 01:26:55.634214674 +0000 UTC m=+4666.670464082" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.668964 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.670067 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b6f577d77-fcqjv" podStartSLOduration=2.81965338 podStartE2EDuration="13.67005784s" podCreationTimestamp="2025-09-30 01:26:42 +0000 UTC" firstStartedPulling="2025-09-30 01:26:43.695833882 +0000 UTC m=+4654.732083290" lastFinishedPulling="2025-09-30 01:26:54.546238342 +0000 UTC m=+4665.582487750" observedRunningTime="2025-09-30 01:26:55.605579897 +0000 UTC m=+4666.641829305" watchObservedRunningTime="2025-09-30 01:26:55.67005784 +0000 UTC m=+4666.706307248" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.686693 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.686766 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df6124d8-9505-4109-b1dc-a4e58382b4a3-logs\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.686794 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.686820 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59z5w\" 
(UniqueName: \"kubernetes.io/projected/df6124d8-9505-4109-b1dc-a4e58382b4a3-kube-api-access-59z5w\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.686903 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/df6124d8-9505-4109-b1dc-a4e58382b4a3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.687047 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.687079 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.687104 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.687164 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/df6124d8-9505-4109-b1dc-a4e58382b4a3-ceph\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.713416 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79549f56-3ac0-4bea-898b-c9b8c4d4eebb" path="/var/lib/kubelet/pods/79549f56-3ac0-4bea-898b-c9b8c4d4eebb/volumes" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.714061 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.721505 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.735702 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.737556 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.739618 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.740708 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.787290 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.789937 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59z5w\" (UniqueName: \"kubernetes.io/projected/df6124d8-9505-4109-b1dc-a4e58382b4a3-kube-api-access-59z5w\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.789992 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6w2h\" (UniqueName: \"kubernetes.io/projected/3b8e8476-60e6-46aa-804a-67fc467166d2-kube-api-access-d6w2h\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790034 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3b8e8476-60e6-46aa-804a-67fc467166d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790119 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790164 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b8e8476-60e6-46aa-804a-67fc467166d2-logs\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790230 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/df6124d8-9505-4109-b1dc-a4e58382b4a3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790376 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790772 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790869 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790910 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3b8e8476-60e6-46aa-804a-67fc467166d2-ceph\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790966 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.790993 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.791031 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.791147 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/df6124d8-9505-4109-b1dc-a4e58382b4a3-ceph\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.791302 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.791387 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.791545 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/df6124d8-9505-4109-b1dc-a4e58382b4a3-logs\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.791612 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.792866 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/df6124d8-9505-4109-b1dc-a4e58382b4a3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.794212 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.794383 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df6124d8-9505-4109-b1dc-a4e58382b4a3-logs\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.812278 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.815424 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.815490 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59z5w\" (UniqueName: \"kubernetes.io/projected/df6124d8-9505-4109-b1dc-a4e58382b4a3-kube-api-access-59z5w\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.816556 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.823437 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/df6124d8-9505-4109-b1dc-a4e58382b4a3-ceph\") pod 
\"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.823867 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.831853 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.832462 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df6124d8-9505-4109-b1dc-a4e58382b4a3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.848839 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"df6124d8-9505-4109-b1dc-a4e58382b4a3\") " pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893355 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3b8e8476-60e6-46aa-804a-67fc467166d2-ceph\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893507 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893558 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6w2h\" (UniqueName: \"kubernetes.io/projected/3b8e8476-60e6-46aa-804a-67fc467166d2-kube-api-access-d6w2h\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893586 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3b8e8476-60e6-46aa-804a-67fc467166d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893634 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893704 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b8e8476-60e6-46aa-804a-67fc467166d2-logs\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 
01:26:55.893748 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893784 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.893825 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.894182 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.894900 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3b8e8476-60e6-46aa-804a-67fc467166d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.896892 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b8e8476-60e6-46aa-804a-67fc467166d2-logs\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.900174 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.902609 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.904303 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3b8e8476-60e6-46aa-804a-67fc467166d2-ceph\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.915342 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.921937 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.926220 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b8e8476-60e6-46aa-804a-67fc467166d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.936439 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6w2h\" (UniqueName: \"kubernetes.io/projected/3b8e8476-60e6-46aa-804a-67fc467166d2-kube-api-access-d6w2h\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:55 crc kubenswrapper[4809]: I0930 01:26:55.948293 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"3b8e8476-60e6-46aa-804a-67fc467166d2\") " pod="openstack/glance-default-external-api-0" Sep 30 01:26:56 crc kubenswrapper[4809]: I0930 01:26:56.058063 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 01:26:56 crc kubenswrapper[4809]: I0930 01:26:56.502165 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 01:26:56 crc kubenswrapper[4809]: I0930 01:26:56.630001 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:56 crc kubenswrapper[4809]: I0930 01:26:56.630092 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:26:56 crc kubenswrapper[4809]: I0930 01:26:56.678066 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 01:26:57 crc kubenswrapper[4809]: W0930 01:26:57.321841 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf6124d8_9505_4109_b1dc_a4e58382b4a3.slice/crio-15e259cc63f20e77b5126a087fda77d297db60550822efa6ec6c50cf26012995 WatchSource:0}: Error finding container 15e259cc63f20e77b5126a087fda77d297db60550822efa6ec6c50cf26012995: Status 404 returned error can't find the container with id 15e259cc63f20e77b5126a087fda77d297db60550822efa6ec6c50cf26012995 Sep 30 01:26:57 crc kubenswrapper[4809]: I0930 01:26:57.561321 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"df6124d8-9505-4109-b1dc-a4e58382b4a3","Type":"ContainerStarted","Data":"15e259cc63f20e77b5126a087fda77d297db60550822efa6ec6c50cf26012995"} Sep 30 01:26:57 crc kubenswrapper[4809]: I0930 01:26:57.563487 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"3b8e8476-60e6-46aa-804a-67fc467166d2","Type":"ContainerStarted","Data":"38afe9add8ef94b0c2cd66f017f3772784b79757e2e282b888fa0479e686f069"} Sep 30 01:26:57 crc kubenswrapper[4809]: I0930 01:26:57.703141 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16e7cb52-e504-487a-bf82-ca5aff7e7149" path="/var/lib/kubelet/pods/16e7cb52-e504-487a-bf82-ca5aff7e7149/volumes" Sep 30 01:26:58 crc kubenswrapper[4809]: I0930 01:26:58.574498 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"df6124d8-9505-4109-b1dc-a4e58382b4a3","Type":"ContainerStarted","Data":"05702ec45570aeb365f0b8aacc661bee9e3cf1a161e8de09d4b260a144835074"} Sep 30 01:26:58 crc kubenswrapper[4809]: I0930 01:26:58.576925 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3b8e8476-60e6-46aa-804a-67fc467166d2","Type":"ContainerStarted","Data":"4440a531f93097327a89bdfc1f05907ad79eb8cb70b5228d3ff5c83108667858"} Sep 30 01:26:59 crc kubenswrapper[4809]: I0930 01:26:59.594958 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"df6124d8-9505-4109-b1dc-a4e58382b4a3","Type":"ContainerStarted","Data":"058916d0da79846f19641de498e1aa7b18f59612c18fea51ccb952342c54b760"} Sep 30 01:26:59 crc kubenswrapper[4809]: I0930 01:26:59.599134 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3b8e8476-60e6-46aa-804a-67fc467166d2","Type":"ContainerStarted","Data":"a3c63530baf9e5fdbc612e1702bf2fd0ba3da93b83e4029db0f4aed4b07da350"} Sep 30 01:26:59 crc kubenswrapper[4809]: I0930 01:26:59.618994 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.618974866 podStartE2EDuration="4.618974866s" podCreationTimestamp="2025-09-30 01:26:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:26:59.618496213 +0000 UTC m=+4670.654745641" watchObservedRunningTime="2025-09-30 01:26:59.618974866 +0000 UTC m=+4670.655224294" Sep 30 01:26:59 crc kubenswrapper[4809]: I0930 01:26:59.650539 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.650521535 podStartE2EDuration="4.650521535s" podCreationTimestamp="2025-09-30 01:26:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:26:59.635858511 +0000 UTC m=+4670.672107929" watchObservedRunningTime="2025-09-30 01:26:59.650521535 +0000 UTC m=+4670.686770943" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.509695 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-9476-account-create-6ww2r"] Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.511701 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.515362 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.520052 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-9476-account-create-6ww2r"] Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.553567 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkqlx\" (UniqueName: \"kubernetes.io/projected/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2-kube-api-access-nkqlx\") pod \"manila-9476-account-create-6ww2r\" (UID: \"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2\") " pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.655191 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkqlx\" (UniqueName: \"kubernetes.io/projected/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2-kube-api-access-nkqlx\") pod \"manila-9476-account-create-6ww2r\" (UID: \"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2\") " pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.687407 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkqlx\" (UniqueName: \"kubernetes.io/projected/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2-kube-api-access-nkqlx\") pod \"manila-9476-account-create-6ww2r\" (UID: \"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2\") " pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.831693 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:02 crc kubenswrapper[4809]: I0930 01:27:02.972466 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:27:03 crc kubenswrapper[4809]: I0930 01:27:03.210133 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:27:03 crc kubenswrapper[4809]: I0930 01:27:03.320875 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-9476-account-create-6ww2r"] Sep 30 01:27:03 crc kubenswrapper[4809]: I0930 01:27:03.647657 4809 generic.go:334] "Generic (PLEG): container finished" podID="d79361f6-f824-40ff-afd1-ab2ca1ff5cb2" containerID="ffe7b26062433c8ce583c1f85c2d7f5ff370ae7d6cfb7bba1f14fcc9818f9ae1" exitCode=0 Sep 30 01:27:03 crc kubenswrapper[4809]: I0930 01:27:03.647691 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9476-account-create-6ww2r" event={"ID":"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2","Type":"ContainerDied","Data":"ffe7b26062433c8ce583c1f85c2d7f5ff370ae7d6cfb7bba1f14fcc9818f9ae1"} Sep 30 01:27:03 crc kubenswrapper[4809]: I0930 01:27:03.647963 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9476-account-create-6ww2r" event={"ID":"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2","Type":"ContainerStarted","Data":"063a6507ecd94c5ee974e6a6ad77408c2c14fd1ecc7413f76f65dca840b6d9a8"} Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.127955 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.207782 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkqlx\" (UniqueName: \"kubernetes.io/projected/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2-kube-api-access-nkqlx\") pod \"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2\" (UID: \"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2\") " Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.216076 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2-kube-api-access-nkqlx" (OuterVolumeSpecName: "kube-api-access-nkqlx") pod "d79361f6-f824-40ff-afd1-ab2ca1ff5cb2" (UID: "d79361f6-f824-40ff-afd1-ab2ca1ff5cb2"). InnerVolumeSpecName "kube-api-access-nkqlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.310229 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkqlx\" (UniqueName: \"kubernetes.io/projected/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2-kube-api-access-nkqlx\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.681169 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9476-account-create-6ww2r" event={"ID":"d79361f6-f824-40ff-afd1-ab2ca1ff5cb2","Type":"ContainerDied","Data":"063a6507ecd94c5ee974e6a6ad77408c2c14fd1ecc7413f76f65dca840b6d9a8"} Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.681210 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="063a6507ecd94c5ee974e6a6ad77408c2c14fd1ecc7413f76f65dca840b6d9a8" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.681266 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-9476-account-create-6ww2r" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.825449 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-64c7465c7b-5tvxt" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.66:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.66:8443: connect: connection refused" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.923316 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.923372 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.958194 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:05 crc kubenswrapper[4809]: I0930 01:27:05.992313 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.059953 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.059997 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.102962 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.110487 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.631993 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5b44f7d95d-84wt2" podUID="818908b2-bd0c-49f5-a239-06ae4b04f236" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.67:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.67:8443: connect: connection refused" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.699718 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.700616 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.701142 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 01:27:06 crc kubenswrapper[4809]: I0930 01:27:06.703461 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.859713 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-6ct68"] Sep 30 01:27:07 crc kubenswrapper[4809]: E0930 01:27:07.861002 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79361f6-f824-40ff-afd1-ab2ca1ff5cb2" containerName="mariadb-account-create" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.861020 4809 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d79361f6-f824-40ff-afd1-ab2ca1ff5cb2" containerName="mariadb-account-create" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.861302 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d79361f6-f824-40ff-afd1-ab2ca1ff5cb2" containerName="mariadb-account-create" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.862241 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.864973 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-mrv5p" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.869398 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.873146 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-6ct68"] Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.992724 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-combined-ca-bundle\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.992768 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-config-data\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.992858 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-job-config-data\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:07 crc kubenswrapper[4809]: I0930 01:27:07.992880 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9k4j\" (UniqueName: \"kubernetes.io/projected/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-kube-api-access-j9k4j\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.094574 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-job-config-data\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.094630 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9k4j\" (UniqueName: \"kubernetes.io/projected/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-kube-api-access-j9k4j\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.094749 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-combined-ca-bundle\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.094774 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-config-data\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.111291 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-job-config-data\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.111706 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-config-data\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.128284 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-combined-ca-bundle\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.137176 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9k4j\" (UniqueName: \"kubernetes.io/projected/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-kube-api-access-j9k4j\") pod \"manila-db-sync-6ct68\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.190174 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.716179 4809 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.716757 4809 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 01:27:08 crc kubenswrapper[4809]: I0930 01:27:08.847360 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-6ct68"] Sep 30 01:27:08 crc kubenswrapper[4809]: W0930 01:27:08.847837 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc535060_3a0f_47f2_b7d7_1bbb28a367dc.slice/crio-112ac253aa372808b8baea8ffa19d2509137dd7d60f61744fd070f51f7d30514 WatchSource:0}: Error finding container 112ac253aa372808b8baea8ffa19d2509137dd7d60f61744fd070f51f7d30514: Status 404 returned error can't find the container with id 112ac253aa372808b8baea8ffa19d2509137dd7d60f61744fd070f51f7d30514 Sep 30 01:27:09 crc kubenswrapper[4809]: I0930 01:27:09.734914 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6ct68" event={"ID":"dc535060-3a0f-47f2-b7d7-1bbb28a367dc","Type":"ContainerStarted","Data":"112ac253aa372808b8baea8ffa19d2509137dd7d60f61744fd070f51f7d30514"} Sep 30 01:27:10 crc kubenswrapper[4809]: I0930 01:27:10.041364 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:10 crc kubenswrapper[4809]: I0930 01:27:10.041619 4809 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 01:27:10 crc kubenswrapper[4809]: I0930 01:27:10.049695 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 01:27:10 crc kubenswrapper[4809]: I0930 01:27:10.108224 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 01:27:10 crc kubenswrapper[4809]: I0930 01:27:10.108267 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 01:27:15 crc kubenswrapper[4809]: I0930 01:27:15.800949 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6ct68" event={"ID":"dc535060-3a0f-47f2-b7d7-1bbb28a367dc","Type":"ContainerStarted","Data":"89e3ddaedb672590f4c9e97301af5d8748e4ea33d2a2a7d43353ccb61522f12e"} Sep 30 01:27:15 crc kubenswrapper[4809]: I0930 01:27:15.826360 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-6ct68" podStartSLOduration=2.654450023 podStartE2EDuration="8.826333628s" podCreationTimestamp="2025-09-30 01:27:07 +0000 UTC" firstStartedPulling="2025-09-30 01:27:08.850663906 +0000 UTC m=+4679.886913314" lastFinishedPulling="2025-09-30 01:27:15.022547501 +0000 UTC m=+4686.058796919" observedRunningTime="2025-09-30 01:27:15.821720751 +0000 UTC m=+4686.857970199" watchObservedRunningTime="2025-09-30 01:27:15.826333628 +0000 UTC m=+4686.862583076" Sep 30 01:27:18 crc kubenswrapper[4809]: I0930 01:27:18.285156 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:27:18 crc kubenswrapper[4809]: I0930 01:27:18.395247 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:27:19 crc kubenswrapper[4809]: I0930 
01:27:19.957204 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5b44f7d95d-84wt2" Sep 30 01:27:19 crc kubenswrapper[4809]: I0930 01:27:19.968698 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:27:20 crc kubenswrapper[4809]: I0930 01:27:20.045227 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64c7465c7b-5tvxt"] Sep 30 01:27:20 crc kubenswrapper[4809]: I0930 01:27:20.861066 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-64c7465c7b-5tvxt" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon-log" containerID="cri-o://05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef" gracePeriod=30 Sep 30 01:27:20 crc kubenswrapper[4809]: I0930 01:27:20.861176 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-64c7465c7b-5tvxt" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" containerID="cri-o://24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5" gracePeriod=30 Sep 30 01:27:24 crc kubenswrapper[4809]: I0930 01:27:24.905143 4809 generic.go:334] "Generic (PLEG): container finished" podID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerID="24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5" exitCode=0 Sep 30 01:27:24 crc kubenswrapper[4809]: I0930 01:27:24.905251 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c7465c7b-5tvxt" event={"ID":"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6","Type":"ContainerDied","Data":"24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5"} Sep 30 01:27:24 crc kubenswrapper[4809]: I0930 01:27:24.909363 4809 generic.go:334] "Generic (PLEG): container finished" podID="dc535060-3a0f-47f2-b7d7-1bbb28a367dc" containerID="89e3ddaedb672590f4c9e97301af5d8748e4ea33d2a2a7d43353ccb61522f12e" exitCode=0 Sep 30 01:27:24 crc kubenswrapper[4809]: I0930 01:27:24.909430 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6ct68" event={"ID":"dc535060-3a0f-47f2-b7d7-1bbb28a367dc","Type":"ContainerDied","Data":"89e3ddaedb672590f4c9e97301af5d8748e4ea33d2a2a7d43353ccb61522f12e"} Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.824867 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64c7465c7b-5tvxt" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.66:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.66:8443: connect: connection refused" Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.926953 4809 generic.go:334] "Generic (PLEG): container finished" podID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerID="d49eb8c99d66bc7ea646ab25dd1b69f824178c0b98a1222a7bbb65c610e5f6b5" exitCode=137 Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.926993 4809 generic.go:334] "Generic (PLEG): container finished" podID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerID="ad16d68eebb2511cfd738bbbe5b63b8a16474e617e996c6022c048dd43fe3fa6" exitCode=137 Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.927034 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55dcbdc977-npgqs" event={"ID":"56c24c30-a3af-4fb9-907b-6240a47687b0","Type":"ContainerDied","Data":"d49eb8c99d66bc7ea646ab25dd1b69f824178c0b98a1222a7bbb65c610e5f6b5"} Sep 30 01:27:25 crc kubenswrapper[4809]: 
I0930 01:27:25.927058 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55dcbdc977-npgqs" event={"ID":"56c24c30-a3af-4fb9-907b-6240a47687b0","Type":"ContainerDied","Data":"ad16d68eebb2511cfd738bbbe5b63b8a16474e617e996c6022c048dd43fe3fa6"} Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.932099 4809 generic.go:334] "Generic (PLEG): container finished" podID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerID="18d30499cc9527b4586f9a4080c5c8d84f78f7ad86185c3c3415614ce52c2830" exitCode=137 Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.932140 4809 generic.go:334] "Generic (PLEG): container finished" podID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerID="93364597e6dce5c641c152f5fcc304bc2d3a7c14beac710910b3167b81297d62" exitCode=137 Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.932191 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6f577d77-fcqjv" event={"ID":"c1b7a534-1f99-4f28-af81-d31c55d4e7f0","Type":"ContainerDied","Data":"18d30499cc9527b4586f9a4080c5c8d84f78f7ad86185c3c3415614ce52c2830"} Sep 30 01:27:25 crc kubenswrapper[4809]: I0930 01:27:25.932250 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6f577d77-fcqjv" event={"ID":"c1b7a534-1f99-4f28-af81-d31c55d4e7f0","Type":"ContainerDied","Data":"93364597e6dce5c641c152f5fcc304bc2d3a7c14beac710910b3167b81297d62"} Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.145857 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.159259 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.244145 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbsfr\" (UniqueName: \"kubernetes.io/projected/56c24c30-a3af-4fb9-907b-6240a47687b0-kube-api-access-cbsfr\") pod \"56c24c30-a3af-4fb9-907b-6240a47687b0\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.244516 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-config-data\") pod \"56c24c30-a3af-4fb9-907b-6240a47687b0\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.244572 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-scripts\") pod \"56c24c30-a3af-4fb9-907b-6240a47687b0\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.244814 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c24c30-a3af-4fb9-907b-6240a47687b0-logs\") pod \"56c24c30-a3af-4fb9-907b-6240a47687b0\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.244883 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c24c30-a3af-4fb9-907b-6240a47687b0-horizon-secret-key\") pod \"56c24c30-a3af-4fb9-907b-6240a47687b0\" (UID: \"56c24c30-a3af-4fb9-907b-6240a47687b0\") " Sep 30 01:27:26 crc 
kubenswrapper[4809]: I0930 01:27:26.250405 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56c24c30-a3af-4fb9-907b-6240a47687b0-logs" (OuterVolumeSpecName: "logs") pod "56c24c30-a3af-4fb9-907b-6240a47687b0" (UID: "56c24c30-a3af-4fb9-907b-6240a47687b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.254772 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56c24c30-a3af-4fb9-907b-6240a47687b0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "56c24c30-a3af-4fb9-907b-6240a47687b0" (UID: "56c24c30-a3af-4fb9-907b-6240a47687b0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.255596 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56c24c30-a3af-4fb9-907b-6240a47687b0-kube-api-access-cbsfr" (OuterVolumeSpecName: "kube-api-access-cbsfr") pod "56c24c30-a3af-4fb9-907b-6240a47687b0" (UID: "56c24c30-a3af-4fb9-907b-6240a47687b0"). InnerVolumeSpecName "kube-api-access-cbsfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.284424 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-scripts" (OuterVolumeSpecName: "scripts") pod "56c24c30-a3af-4fb9-907b-6240a47687b0" (UID: "56c24c30-a3af-4fb9-907b-6240a47687b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.284519 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-config-data" (OuterVolumeSpecName: "config-data") pod "56c24c30-a3af-4fb9-907b-6240a47687b0" (UID: "56c24c30-a3af-4fb9-907b-6240a47687b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.346608 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-config-data\") pod \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.346676 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-logs\") pod \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.346768 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-horizon-secret-key\") pod \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.346789 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-scripts\") pod \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.346824 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fc4t\" (UniqueName: \"kubernetes.io/projected/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-kube-api-access-5fc4t\") pod \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\" (UID: \"c1b7a534-1f99-4f28-af81-d31c55d4e7f0\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.347313 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c24c30-a3af-4fb9-907b-6240a47687b0-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.347325 4809 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c24c30-a3af-4fb9-907b-6240a47687b0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.347334 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbsfr\" (UniqueName: \"kubernetes.io/projected/56c24c30-a3af-4fb9-907b-6240a47687b0-kube-api-access-cbsfr\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.347344 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.347355 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c24c30-a3af-4fb9-907b-6240a47687b0-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.349050 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-logs" (OuterVolumeSpecName: "logs") pod "c1b7a534-1f99-4f28-af81-d31c55d4e7f0" (UID: "c1b7a534-1f99-4f28-af81-d31c55d4e7f0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.356351 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c1b7a534-1f99-4f28-af81-d31c55d4e7f0" (UID: "c1b7a534-1f99-4f28-af81-d31c55d4e7f0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.356520 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-kube-api-access-5fc4t" (OuterVolumeSpecName: "kube-api-access-5fc4t") pod "c1b7a534-1f99-4f28-af81-d31c55d4e7f0" (UID: "c1b7a534-1f99-4f28-af81-d31c55d4e7f0"). InnerVolumeSpecName "kube-api-access-5fc4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.389883 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-config-data" (OuterVolumeSpecName: "config-data") pod "c1b7a534-1f99-4f28-af81-d31c55d4e7f0" (UID: "c1b7a534-1f99-4f28-af81-d31c55d4e7f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.411985 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.414300 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-scripts" (OuterVolumeSpecName: "scripts") pod "c1b7a534-1f99-4f28-af81-d31c55d4e7f0" (UID: "c1b7a534-1f99-4f28-af81-d31c55d4e7f0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.449706 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.449739 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.449751 4809 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.449762 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.449773 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fc4t\" (UniqueName: \"kubernetes.io/projected/c1b7a534-1f99-4f28-af81-d31c55d4e7f0-kube-api-access-5fc4t\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.551471 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-config-data\") pod \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.551585 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-combined-ca-bundle\") pod \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.551767 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9k4j\" (UniqueName: \"kubernetes.io/projected/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-kube-api-access-j9k4j\") pod \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.551798 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-job-config-data\") pod \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\" (UID: \"dc535060-3a0f-47f2-b7d7-1bbb28a367dc\") " Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.558877 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "dc535060-3a0f-47f2-b7d7-1bbb28a367dc" (UID: "dc535060-3a0f-47f2-b7d7-1bbb28a367dc"). InnerVolumeSpecName "job-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.560123 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-kube-api-access-j9k4j" (OuterVolumeSpecName: "kube-api-access-j9k4j") pod "dc535060-3a0f-47f2-b7d7-1bbb28a367dc" (UID: "dc535060-3a0f-47f2-b7d7-1bbb28a367dc"). InnerVolumeSpecName "kube-api-access-j9k4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.561520 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-config-data" (OuterVolumeSpecName: "config-data") pod "dc535060-3a0f-47f2-b7d7-1bbb28a367dc" (UID: "dc535060-3a0f-47f2-b7d7-1bbb28a367dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.603436 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc535060-3a0f-47f2-b7d7-1bbb28a367dc" (UID: "dc535060-3a0f-47f2-b7d7-1bbb28a367dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.656091 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.656133 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.656155 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9k4j\" (UniqueName: \"kubernetes.io/projected/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-kube-api-access-j9k4j\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.656172 4809 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/dc535060-3a0f-47f2-b7d7-1bbb28a367dc-job-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.949753 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b6f577d77-fcqjv" event={"ID":"c1b7a534-1f99-4f28-af81-d31c55d4e7f0","Type":"ContainerDied","Data":"14e2d9a83169b1f2799905e66d8b4ec719b5f0e8e7cfcc4f47f5b57992c3b948"} Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.949841 4809 scope.go:117] "RemoveContainer" containerID="18d30499cc9527b4586f9a4080c5c8d84f78f7ad86185c3c3415614ce52c2830" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.950073 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b6f577d77-fcqjv" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.959340 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6ct68" event={"ID":"dc535060-3a0f-47f2-b7d7-1bbb28a367dc","Type":"ContainerDied","Data":"112ac253aa372808b8baea8ffa19d2509137dd7d60f61744fd070f51f7d30514"} Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.959385 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="112ac253aa372808b8baea8ffa19d2509137dd7d60f61744fd070f51f7d30514" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.959471 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6ct68" Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.964911 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55dcbdc977-npgqs" event={"ID":"56c24c30-a3af-4fb9-907b-6240a47687b0","Type":"ContainerDied","Data":"ef14dc68c6b36c654e90d790610cf0ad9aaae8309772eb8735eda1869944ec2f"} Sep 30 01:27:26 crc kubenswrapper[4809]: I0930 01:27:26.965048 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55dcbdc977-npgqs" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.014573 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55dcbdc977-npgqs"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.025144 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-55dcbdc977-npgqs"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.038726 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b6f577d77-fcqjv"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.046542 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b6f577d77-fcqjv"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.192945 4809 scope.go:117] "RemoveContainer" containerID="93364597e6dce5c641c152f5fcc304bc2d3a7c14beac710910b3167b81297d62" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.264607 4809 scope.go:117] "RemoveContainer" containerID="d49eb8c99d66bc7ea646ab25dd1b69f824178c0b98a1222a7bbb65c610e5f6b5" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300056 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:27 crc kubenswrapper[4809]: E0930 01:27:27.300555 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon-log" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300569 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon-log" Sep 30 01:27:27 crc kubenswrapper[4809]: E0930 01:27:27.300597 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon-log" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300604 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon-log" Sep 30 01:27:27 crc kubenswrapper[4809]: E0930 01:27:27.300628 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300634 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" 
containerName="horizon" Sep 30 01:27:27 crc kubenswrapper[4809]: E0930 01:27:27.300658 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300664 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon" Sep 30 01:27:27 crc kubenswrapper[4809]: E0930 01:27:27.300676 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc535060-3a0f-47f2-b7d7-1bbb28a367dc" containerName="manila-db-sync" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300681 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc535060-3a0f-47f2-b7d7-1bbb28a367dc" containerName="manila-db-sync" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300888 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc535060-3a0f-47f2-b7d7-1bbb28a367dc" containerName="manila-db-sync" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300902 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon-log" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300921 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon-log" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300936 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" containerName="horizon" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.300946 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" containerName="horizon" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.302776 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.307373 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.307696 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.307843 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.307985 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-mrv5p" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.310202 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.312139 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.319369 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.319986 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.330154 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.374922 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375533 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375588 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-ceph\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375609 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-scripts\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375734 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375752 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375799 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375846 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-combined-ca-bundle\") pod 
\"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375897 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-scripts\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.375915 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7dhn\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-kube-api-access-z7dhn\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.376069 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.376570 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.376612 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.376657 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fflnv\" (UniqueName: \"kubernetes.io/projected/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-kube-api-access-fflnv\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.439699 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74cfff99f-4rtdc"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.442056 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.458447 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74cfff99f-4rtdc"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.479904 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.479995 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-dns-swift-storage-0\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480026 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480087 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480106 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fflnv\" (UniqueName: \"kubernetes.io/projected/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-kube-api-access-fflnv\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480148 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480237 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljmgx\" (UniqueName: \"kubernetes.io/projected/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-kube-api-access-ljmgx\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480261 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480282 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-ceph\") pod 
\"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480296 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-scripts\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480324 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480340 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480357 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-ovsdbserver-sb\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480381 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480403 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480448 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480464 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-config\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480484 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-dns-svc\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " 
pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480515 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-scripts\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480539 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7dhn\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-kube-api-access-z7dhn\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.480577 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-openstack-edpm-ipam\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.485963 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.486459 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.487766 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.503816 4809 scope.go:117] "RemoveContainer" containerID="ad16d68eebb2511cfd738bbbe5b63b8a16474e617e996c6022c048dd43fe3fa6" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.504032 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.507402 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.507805 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-scripts\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc 
kubenswrapper[4809]: I0930 01:27:27.508233 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.510637 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.514538 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.514952 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-scripts\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.515237 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-ceph\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.515570 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fflnv\" (UniqueName: \"kubernetes.io/projected/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-kube-api-access-fflnv\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.516108 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.521468 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7dhn\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-kube-api-access-z7dhn\") pod \"manila-share-share1-0\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.581825 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-dns-swift-storage-0\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.582228 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljmgx\" (UniqueName: 
\"kubernetes.io/projected/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-kube-api-access-ljmgx\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.582262 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-ovsdbserver-sb\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.582282 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.582541 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-config\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.583091 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-dns-svc\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.583129 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-openstack-edpm-ipam\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.584181 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-openstack-edpm-ipam\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.584845 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-dns-swift-storage-0\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.585634 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-ovsdbserver-sb\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.586151 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-config\") pod 
\"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.586171 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-dns-svc\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.586207 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.586686 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.588429 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.592977 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.618350 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.637517 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.648892 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljmgx\" (UniqueName: \"kubernetes.io/projected/4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e-kube-api-access-ljmgx\") pod \"dnsmasq-dns-74cfff99f-4rtdc\" (UID: \"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e\") " pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.668445 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.763956 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56c24c30-a3af-4fb9-907b-6240a47687b0" path="/var/lib/kubelet/pods/56c24c30-a3af-4fb9-907b-6240a47687b0/volumes" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.764926 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1b7a534-1f99-4f28-af81-d31c55d4e7f0" path="/var/lib/kubelet/pods/c1b7a534-1f99-4f28-af81-d31c55d4e7f0/volumes" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.771881 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793058 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/716d8e2a-a847-4c31-abb1-006e352f193f-logs\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793142 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9jsk\" (UniqueName: \"kubernetes.io/projected/716d8e2a-a847-4c31-abb1-006e352f193f-kube-api-access-s9jsk\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793196 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/716d8e2a-a847-4c31-abb1-006e352f193f-etc-machine-id\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793226 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-scripts\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793263 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793335 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.793396 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data-custom\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897121 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897289 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897390 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data-custom\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897446 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/716d8e2a-a847-4c31-abb1-006e352f193f-logs\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897480 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9jsk\" (UniqueName: \"kubernetes.io/projected/716d8e2a-a847-4c31-abb1-006e352f193f-kube-api-access-s9jsk\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897549 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/716d8e2a-a847-4c31-abb1-006e352f193f-etc-machine-id\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.897590 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-scripts\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.898154 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/716d8e2a-a847-4c31-abb1-006e352f193f-logs\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.898517 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/716d8e2a-a847-4c31-abb1-006e352f193f-etc-machine-id\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.911512 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data-custom\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.917265 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-scripts\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.918972 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.925463 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.967150 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9jsk\" (UniqueName: \"kubernetes.io/projected/716d8e2a-a847-4c31-abb1-006e352f193f-kube-api-access-s9jsk\") pod \"manila-api-0\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " pod="openstack/manila-api-0" Sep 30 01:27:27 crc kubenswrapper[4809]: I0930 01:27:27.992447 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Sep 30 01:27:28 crc kubenswrapper[4809]: I0930 01:27:28.559531 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:28 crc kubenswrapper[4809]: I0930 01:27:28.771730 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:28 crc kubenswrapper[4809]: I0930 01:27:28.892132 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74cfff99f-4rtdc"] Sep 30 01:27:28 crc kubenswrapper[4809]: I0930 01:27:28.984411 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:28 crc kubenswrapper[4809]: W0930 01:27:28.990125 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod716d8e2a_a847_4c31_abb1_006e352f193f.slice/crio-d264268cbafeeacbc62e0f324aeb87c4c795e859dbd1679ad94f09f2c64f6849 WatchSource:0}: Error finding container d264268cbafeeacbc62e0f324aeb87c4c795e859dbd1679ad94f09f2c64f6849: Status 404 returned error can't find the container with id d264268cbafeeacbc62e0f324aeb87c4c795e859dbd1679ad94f09f2c64f6849 Sep 30 01:27:29 crc kubenswrapper[4809]: I0930 01:27:29.051191 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" event={"ID":"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e","Type":"ContainerStarted","Data":"a266db73a5dc6cb3b3abc1cff1ee3a8ef4c059ef2afaf2df58f37a24afac288b"} Sep 30 01:27:29 crc kubenswrapper[4809]: I0930 01:27:29.053832 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"716d8e2a-a847-4c31-abb1-006e352f193f","Type":"ContainerStarted","Data":"d264268cbafeeacbc62e0f324aeb87c4c795e859dbd1679ad94f09f2c64f6849"} Sep 30 01:27:29 crc kubenswrapper[4809]: I0930 01:27:29.055174 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9c58816c-9134-4995-aaa0-6703afaf058c","Type":"ContainerStarted","Data":"766f269fb184137b25cf31fc367bfd26270a11ced5dbd236bd1db4255d242b8d"} Sep 30 01:27:29 crc kubenswrapper[4809]: I0930 01:27:29.056209 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f0bb1ff8-331b-444d-a312-0e4eb87e39f5","Type":"ContainerStarted","Data":"4ccfd4daeaf76cfd0e76e26fef9c3d604d127e405fa92015b0c073b52d23209c"} Sep 30 01:27:30 crc kubenswrapper[4809]: E0930 01:27:30.378005 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4de13cc2_c89f_45c4_bb69_e86bf6ef6a8e.slice/crio-conmon-ea0df395f43d6f29f56a60aad6b45c0ae313f423dd6a33641cccbd766debf4e1.scope\": RecentStats: unable to find data in memory cache]" Sep 30 01:27:30 crc 
kubenswrapper[4809]: I0930 01:27:30.736900 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.094568 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"716d8e2a-a847-4c31-abb1-006e352f193f","Type":"ContainerStarted","Data":"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9"} Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.094950 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.094968 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"716d8e2a-a847-4c31-abb1-006e352f193f","Type":"ContainerStarted","Data":"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce"} Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.094687 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api" containerID="cri-o://28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9" gracePeriod=30 Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.094621 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api-log" containerID="cri-o://48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce" gracePeriod=30 Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.103612 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f0bb1ff8-331b-444d-a312-0e4eb87e39f5","Type":"ContainerStarted","Data":"f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2"} Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.104719 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f0bb1ff8-331b-444d-a312-0e4eb87e39f5","Type":"ContainerStarted","Data":"b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6"} Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.113401 4809 generic.go:334] "Generic (PLEG): container finished" podID="4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e" containerID="ea0df395f43d6f29f56a60aad6b45c0ae313f423dd6a33641cccbd766debf4e1" exitCode=0 Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.113441 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" event={"ID":"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e","Type":"ContainerDied","Data":"ea0df395f43d6f29f56a60aad6b45c0ae313f423dd6a33641cccbd766debf4e1"} Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.121079 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=4.121053408 podStartE2EDuration="4.121053408s" podCreationTimestamp="2025-09-30 01:27:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:27:31.117050518 +0000 UTC m=+4702.153299926" watchObservedRunningTime="2025-09-30 01:27:31.121053408 +0000 UTC m=+4702.157302826" Sep 30 01:27:31 crc kubenswrapper[4809]: I0930 01:27:31.183280 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.9420215819999997 podStartE2EDuration="4.183259349s" 
podCreationTimestamp="2025-09-30 01:27:27 +0000 UTC" firstStartedPulling="2025-09-30 01:27:28.575031565 +0000 UTC m=+4699.611280973" lastFinishedPulling="2025-09-30 01:27:29.816269332 +0000 UTC m=+4700.852518740" observedRunningTime="2025-09-30 01:27:31.174076916 +0000 UTC m=+4702.210326324" watchObservedRunningTime="2025-09-30 01:27:31.183259349 +0000 UTC m=+4702.219508757" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.141377 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.142316 4809 generic.go:334] "Generic (PLEG): container finished" podID="716d8e2a-a847-4c31-abb1-006e352f193f" containerID="28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9" exitCode=143 Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.142344 4809 generic.go:334] "Generic (PLEG): container finished" podID="716d8e2a-a847-4c31-abb1-006e352f193f" containerID="48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce" exitCode=143 Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.142359 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"716d8e2a-a847-4c31-abb1-006e352f193f","Type":"ContainerDied","Data":"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9"} Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.142446 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"716d8e2a-a847-4c31-abb1-006e352f193f","Type":"ContainerDied","Data":"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce"} Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.142458 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"716d8e2a-a847-4c31-abb1-006e352f193f","Type":"ContainerDied","Data":"d264268cbafeeacbc62e0f324aeb87c4c795e859dbd1679ad94f09f2c64f6849"} Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.142496 4809 scope.go:117] "RemoveContainer" containerID="28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.197274 4809 scope.go:117] "RemoveContainer" containerID="48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234084 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/716d8e2a-a847-4c31-abb1-006e352f193f-etc-machine-id\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234149 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-scripts\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234164 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/716d8e2a-a847-4c31-abb1-006e352f193f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234292 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9jsk\" (UniqueName: \"kubernetes.io/projected/716d8e2a-a847-4c31-abb1-006e352f193f-kube-api-access-s9jsk\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234427 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/716d8e2a-a847-4c31-abb1-006e352f193f-logs\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234456 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234486 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data-custom\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234508 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-combined-ca-bundle\") pod \"716d8e2a-a847-4c31-abb1-006e352f193f\" (UID: \"716d8e2a-a847-4c31-abb1-006e352f193f\") " Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.234965 4809 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/716d8e2a-a847-4c31-abb1-006e352f193f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.235862 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/716d8e2a-a847-4c31-abb1-006e352f193f-logs" (OuterVolumeSpecName: "logs") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.275062 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.287928 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-scripts" (OuterVolumeSpecName: "scripts") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.294559 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/716d8e2a-a847-4c31-abb1-006e352f193f-kube-api-access-s9jsk" (OuterVolumeSpecName: "kube-api-access-s9jsk") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "kube-api-access-s9jsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.316789 4809 scope.go:117] "RemoveContainer" containerID="28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.318894 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: E0930 01:27:32.320012 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9\": container with ID starting with 28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9 not found: ID does not exist" containerID="28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.320046 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9"} err="failed to get container status \"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9\": rpc error: code = NotFound desc = could not find container \"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9\": container with ID starting with 28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9 not found: ID does not exist" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.320068 4809 scope.go:117] "RemoveContainer" containerID="48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce" Sep 30 01:27:32 crc kubenswrapper[4809]: E0930 01:27:32.320366 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce\": container with ID starting with 48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce not found: ID does not exist" containerID="48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.320408 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce"} err="failed to get container status \"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce\": rpc error: code = NotFound desc = could not find container \"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce\": container with ID starting with 48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce not found: ID does not exist" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.320422 4809 scope.go:117] "RemoveContainer" 
containerID="28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.320904 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9"} err="failed to get container status \"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9\": rpc error: code = NotFound desc = could not find container \"28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9\": container with ID starting with 28e0ae5240b0f504366c053b5c78cd8cfdcf922c6ccc085313db6bddddb712a9 not found: ID does not exist" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.320944 4809 scope.go:117] "RemoveContainer" containerID="48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.328342 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce"} err="failed to get container status \"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce\": rpc error: code = NotFound desc = could not find container \"48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce\": container with ID starting with 48680cb8a056f90aee6b7ab0c16355f76e362edd40a27d2af247d5da14ca41ce not found: ID does not exist" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.336870 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9jsk\" (UniqueName: \"kubernetes.io/projected/716d8e2a-a847-4c31-abb1-006e352f193f-kube-api-access-s9jsk\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.336895 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/716d8e2a-a847-4c31-abb1-006e352f193f-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.336905 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.337031 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.337041 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.368351 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data" (OuterVolumeSpecName: "config-data") pod "716d8e2a-a847-4c31-abb1-006e352f193f" (UID: "716d8e2a-a847-4c31-abb1-006e352f193f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:32 crc kubenswrapper[4809]: I0930 01:27:32.438772 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/716d8e2a-a847-4c31-abb1-006e352f193f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.085270 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.085589 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-notification-agent" containerID="cri-o://eee190d73383e3a230fa183925148a18e1280eb0d5e3876c8dda27b8b20bb53c" gracePeriod=30 Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.085688 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="sg-core" containerID="cri-o://ad2081442abe517315210f46018d2b78c6913ad8da346ada4518872d65c3f160" gracePeriod=30 Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.085700 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="proxy-httpd" containerID="cri-o://fc66edf39074ad29033c8ec24e5eb248910800f641f53339f96195a8d7626611" gracePeriod=30 Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.085606 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-central-agent" containerID="cri-o://3ae14c17879d86886ed66292d338f448fc8841c116f99b7f9f5fe79afce22874" gracePeriod=30 Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.159824 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" event={"ID":"4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e","Type":"ContainerStarted","Data":"4bb111172bcb4ea821b2987853bde9065c38aa0f89a97956425fcc6aacf613d1"} Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.159966 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.163266 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.184596 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" podStartSLOduration=6.184581611 podStartE2EDuration="6.184581611s" podCreationTimestamp="2025-09-30 01:27:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:27:33.177308931 +0000 UTC m=+4704.213558349" watchObservedRunningTime="2025-09-30 01:27:33.184581611 +0000 UTC m=+4704.220831019" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.211251 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.228729 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.245799 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:33 crc kubenswrapper[4809]: E0930 01:27:33.246232 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api-log" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.246245 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api-log" Sep 30 01:27:33 crc kubenswrapper[4809]: E0930 01:27:33.246279 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.246287 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.246491 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.246520 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" containerName="manila-api-log" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.247670 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.258115 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.260176 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.260792 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.261012 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.359774 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-scripts\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.359854 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrvqv\" (UniqueName: \"kubernetes.io/projected/89f47877-ed2c-4340-9fa4-e69b105d4d9d-kube-api-access-hrvqv\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.359878 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-config-data-custom\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.360068 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-public-tls-certs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.360194 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-config-data\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.361370 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-internal-tls-certs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.361705 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.361791 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/89f47877-ed2c-4340-9fa4-e69b105d4d9d-etc-machine-id\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.361816 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89f47877-ed2c-4340-9fa4-e69b105d4d9d-logs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464184 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-config-data-custom\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464295 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-public-tls-certs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464338 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-config-data\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464416 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-internal-tls-certs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464447 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464480 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/89f47877-ed2c-4340-9fa4-e69b105d4d9d-etc-machine-id\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464506 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89f47877-ed2c-4340-9fa4-e69b105d4d9d-logs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464584 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-scripts\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.464624 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrvqv\" (UniqueName: 
\"kubernetes.io/projected/89f47877-ed2c-4340-9fa4-e69b105d4d9d-kube-api-access-hrvqv\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.469631 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89f47877-ed2c-4340-9fa4-e69b105d4d9d-logs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.469731 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/89f47877-ed2c-4340-9fa4-e69b105d4d9d-etc-machine-id\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.473339 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-config-data-custom\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.474051 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.476283 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-config-data\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.489172 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-internal-tls-certs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.489182 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-scripts\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.489594 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89f47877-ed2c-4340-9fa4-e69b105d4d9d-public-tls-certs\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.492012 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrvqv\" (UniqueName: \"kubernetes.io/projected/89f47877-ed2c-4340-9fa4-e69b105d4d9d-kube-api-access-hrvqv\") pod \"manila-api-0\" (UID: \"89f47877-ed2c-4340-9fa4-e69b105d4d9d\") " pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.583384 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.718355 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="716d8e2a-a847-4c31-abb1-006e352f193f" path="/var/lib/kubelet/pods/716d8e2a-a847-4c31-abb1-006e352f193f/volumes" Sep 30 01:27:33 crc kubenswrapper[4809]: I0930 01:27:33.922006 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.247:3000/\": dial tcp 10.217.0.247:3000: connect: connection refused" Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.154843 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 01:27:34 crc kubenswrapper[4809]: W0930 01:27:34.161406 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89f47877_ed2c_4340_9fa4_e69b105d4d9d.slice/crio-0b82132002229507354d9d7ba661cb6d74b28fca2b146f875bf83b18bc11dff0 WatchSource:0}: Error finding container 0b82132002229507354d9d7ba661cb6d74b28fca2b146f875bf83b18bc11dff0: Status 404 returned error can't find the container with id 0b82132002229507354d9d7ba661cb6d74b28fca2b146f875bf83b18bc11dff0 Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.173171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"89f47877-ed2c-4340-9fa4-e69b105d4d9d","Type":"ContainerStarted","Data":"0b82132002229507354d9d7ba661cb6d74b28fca2b146f875bf83b18bc11dff0"} Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.175673 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerID="3ae14c17879d86886ed66292d338f448fc8841c116f99b7f9f5fe79afce22874" exitCode=0 Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.175697 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerID="fc66edf39074ad29033c8ec24e5eb248910800f641f53339f96195a8d7626611" exitCode=0 Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.175705 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerID="ad2081442abe517315210f46018d2b78c6913ad8da346ada4518872d65c3f160" exitCode=2 Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.175744 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerDied","Data":"3ae14c17879d86886ed66292d338f448fc8841c116f99b7f9f5fe79afce22874"} Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.175789 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerDied","Data":"fc66edf39074ad29033c8ec24e5eb248910800f641f53339f96195a8d7626611"} Sep 30 01:27:34 crc kubenswrapper[4809]: I0930 01:27:34.175802 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerDied","Data":"ad2081442abe517315210f46018d2b78c6913ad8da346ada4518872d65c3f160"} Sep 30 01:27:35 crc kubenswrapper[4809]: I0930 01:27:35.186730 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"89f47877-ed2c-4340-9fa4-e69b105d4d9d","Type":"ContainerStarted","Data":"ff1db55dadccb0e51b6621686c640ea2e30b8ed4bfa8045400e2d25a24b8a893"} Sep 30 01:27:35 crc kubenswrapper[4809]: I0930 01:27:35.192097 4809 generic.go:334] "Generic (PLEG): container finished" podID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerID="eee190d73383e3a230fa183925148a18e1280eb0d5e3876c8dda27b8b20bb53c" exitCode=0 Sep 30 01:27:35 crc kubenswrapper[4809]: I0930 01:27:35.192141 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerDied","Data":"eee190d73383e3a230fa183925148a18e1280eb0d5e3876c8dda27b8b20bb53c"} Sep 30 01:27:35 crc kubenswrapper[4809]: I0930 01:27:35.824549 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64c7465c7b-5tvxt" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.66:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.66:8443: connect: connection refused" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.455250 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.457393 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrn4l\" (UniqueName: \"kubernetes.io/projected/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-kube-api-access-rrn4l\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.457481 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-run-httpd\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.457546 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-ceilometer-tls-certs\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.457576 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-sg-core-conf-yaml\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.457626 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-config-data\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.457772 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-log-httpd\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.458418 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.458834 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.463863 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-kube-api-access-rrn4l" (OuterVolumeSpecName: "kube-api-access-rrn4l") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "kube-api-access-rrn4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.535061 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.559464 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-scripts\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.560482 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-combined-ca-bundle\") pod \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\" (UID: \"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1\") " Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.561065 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.561086 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrn4l\" (UniqueName: \"kubernetes.io/projected/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-kube-api-access-rrn4l\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.561100 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.561111 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.574940 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-scripts" (OuterVolumeSpecName: "scripts") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.598853 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.640709 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.663700 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.663728 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.664695 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-config-data" (OuterVolumeSpecName: "config-data") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.695233 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" (UID: "ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.766121 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.766366 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.774591 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74cfff99f-4rtdc" Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.866064 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-768b698657-rwt8x"] Sep 30 01:27:37 crc kubenswrapper[4809]: I0930 01:27:37.866567 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-768b698657-rwt8x" podUID="207213cd-fc29-4597-936a-e8f332106ee7" containerName="dnsmasq-dns" containerID="cri-o://64f265b9f3640b0bf65dd37eccf84f09e55dac5818dd9b8ed55f62ebe4a80b11" gracePeriod=10 Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.231882 4809 generic.go:334] "Generic (PLEG): container finished" podID="207213cd-fc29-4597-936a-e8f332106ee7" containerID="64f265b9f3640b0bf65dd37eccf84f09e55dac5818dd9b8ed55f62ebe4a80b11" exitCode=0 Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.231946 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-768b698657-rwt8x" event={"ID":"207213cd-fc29-4597-936a-e8f332106ee7","Type":"ContainerDied","Data":"64f265b9f3640b0bf65dd37eccf84f09e55dac5818dd9b8ed55f62ebe4a80b11"} Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.234380 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"89f47877-ed2c-4340-9fa4-e69b105d4d9d","Type":"ContainerStarted","Data":"98e3f11130365b8639c75dc5666491a8e36cdd2ae1cf16bfc5204e213a6d2dc2"} Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.234502 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.235838 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9c58816c-9134-4995-aaa0-6703afaf058c","Type":"ContainerStarted","Data":"3adb634454ae941062fa1254b37b6a1991ef209a62e8496746271d7ea43508a0"} Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.248220 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1","Type":"ContainerDied","Data":"6dd4f5074ad20ab986161a066c78ce7cfd0c78eca82fc2ed64bf91b48eb31b8c"} Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.248253 4809 scope.go:117] "RemoveContainer" containerID="3ae14c17879d86886ed66292d338f448fc8841c116f99b7f9f5fe79afce22874" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.248409 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.252505 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.252479953 podStartE2EDuration="5.252479953s" podCreationTimestamp="2025-09-30 01:27:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:27:38.249673006 +0000 UTC m=+4709.285922414" watchObservedRunningTime="2025-09-30 01:27:38.252479953 +0000 UTC m=+4709.288729351" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.291405 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.330767 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.346087 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:38 crc kubenswrapper[4809]: E0930 01:27:38.346596 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="proxy-httpd" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.346615 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="proxy-httpd" Sep 30 01:27:38 crc kubenswrapper[4809]: E0930 01:27:38.346628 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-notification-agent" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.346759 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-notification-agent" Sep 30 01:27:38 crc kubenswrapper[4809]: E0930 01:27:38.346776 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="sg-core" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.346782 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="sg-core" Sep 30 01:27:38 crc kubenswrapper[4809]: E0930 01:27:38.346819 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-central-agent" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.346825 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-central-agent" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.347025 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="sg-core" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.347044 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-central-agent" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.347056 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="ceilometer-notification-agent" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.347080 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" containerName="proxy-httpd" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.349043 4809 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.350788 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.351724 4809 scope.go:117] "RemoveContainer" containerID="fc66edf39074ad29033c8ec24e5eb248910800f641f53339f96195a8d7626611" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.354846 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.355831 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.371990 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.384452 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-scripts\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.384495 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-log-httpd\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.384593 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-config-data\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.384761 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5m2q\" (UniqueName: \"kubernetes.io/projected/38228c3a-dc1c-4435-b592-5514407bd351-kube-api-access-n5m2q\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.384825 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.384912 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-run-httpd\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.385176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 
01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.385252 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487474 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-config-data\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487573 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5m2q\" (UniqueName: \"kubernetes.io/projected/38228c3a-dc1c-4435-b592-5514407bd351-kube-api-access-n5m2q\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487597 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487652 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-run-httpd\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487685 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487731 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487763 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-scripts\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.487787 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-log-httpd\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.488332 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-log-httpd\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" 
Sep 30 01:27:38 crc kubenswrapper[4809]: I0930 01:27:38.488580 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-run-httpd\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.264366 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9c58816c-9134-4995-aaa0-6703afaf058c","Type":"ContainerStarted","Data":"c88818285b0b04aba003107a6341cdab2835254c5f3431a292d4d79fc917ff3b"} Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.275294 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-768b698657-rwt8x" event={"ID":"207213cd-fc29-4597-936a-e8f332106ee7","Type":"ContainerDied","Data":"ddba901eb47490e6feb9081cb28b1b74ee46f8684154c88730b21506046abf3a"} Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.275378 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddba901eb47490e6feb9081cb28b1b74ee46f8684154c88730b21506046abf3a" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.305133 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.305572 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-scripts\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.306399 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.306759 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.310066 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5m2q\" (UniqueName: \"kubernetes.io/projected/38228c3a-dc1c-4435-b592-5514407bd351-kube-api-access-n5m2q\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.321613 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-config-data\") pod \"ceilometer-0\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.460844 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.464157 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.473040 4809 scope.go:117] "RemoveContainer" containerID="ad2081442abe517315210f46018d2b78c6913ad8da346ada4518872d65c3f160" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.520792 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-openstack-edpm-ipam\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.520832 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-nb\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.520912 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-svc\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.520960 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mm7mq\" (UniqueName: \"kubernetes.io/projected/207213cd-fc29-4597-936a-e8f332106ee7-kube-api-access-mm7mq\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.520987 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-sb\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.521013 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-swift-storage-0\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.522326 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=4.214984662 podStartE2EDuration="12.522302228s" podCreationTimestamp="2025-09-30 01:27:27 +0000 UTC" firstStartedPulling="2025-09-30 01:27:28.788752222 +0000 UTC m=+4699.825001640" lastFinishedPulling="2025-09-30 01:27:37.096069798 +0000 UTC m=+4708.132319206" observedRunningTime="2025-09-30 01:27:39.298911254 +0000 UTC m=+4710.335160702" watchObservedRunningTime="2025-09-30 01:27:39.522302228 +0000 UTC m=+4710.558551646" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.531944 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/207213cd-fc29-4597-936a-e8f332106ee7-kube-api-access-mm7mq" (OuterVolumeSpecName: "kube-api-access-mm7mq") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). 
InnerVolumeSpecName "kube-api-access-mm7mq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.585828 4809 scope.go:117] "RemoveContainer" containerID="eee190d73383e3a230fa183925148a18e1280eb0d5e3876c8dda27b8b20bb53c" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.624484 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-config\") pod \"207213cd-fc29-4597-936a-e8f332106ee7\" (UID: \"207213cd-fc29-4597-936a-e8f332106ee7\") " Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.626420 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mm7mq\" (UniqueName: \"kubernetes.io/projected/207213cd-fc29-4597-936a-e8f332106ee7-kube-api-access-mm7mq\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.738815 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1" path="/var/lib/kubelet/pods/ed0b8f00-bb2d-4e6d-8d23-e1fdfc1bd5c1/volumes" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.821927 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.826838 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.830711 4809 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.830736 4809 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.834242 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.862546 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.872048 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-config" (OuterVolumeSpecName: "config") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.881719 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "207213cd-fc29-4597-936a-e8f332106ee7" (UID: "207213cd-fc29-4597-936a-e8f332106ee7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.932601 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.932635 4809 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-config\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.932724 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:39 crc kubenswrapper[4809]: I0930 01:27:39.932735 4809 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/207213cd-fc29-4597-936a-e8f332106ee7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:40 crc kubenswrapper[4809]: I0930 01:27:40.177486 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:40 crc kubenswrapper[4809]: I0930 01:27:40.286538 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-768b698657-rwt8x" Sep 30 01:27:40 crc kubenswrapper[4809]: I0930 01:27:40.333085 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-768b698657-rwt8x"] Sep 30 01:27:40 crc kubenswrapper[4809]: I0930 01:27:40.344868 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-768b698657-rwt8x"] Sep 30 01:27:40 crc kubenswrapper[4809]: W0930 01:27:40.568264 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38228c3a_dc1c_4435_b592_5514407bd351.slice/crio-0f69f75919caae2000882b14020744020f9c86ad7a1b95b77a4bcba908d406d8 WatchSource:0}: Error finding container 0f69f75919caae2000882b14020744020f9c86ad7a1b95b77a4bcba908d406d8: Status 404 returned error can't find the container with id 0f69f75919caae2000882b14020744020f9c86ad7a1b95b77a4bcba908d406d8 Sep 30 01:27:41 crc kubenswrapper[4809]: I0930 01:27:41.052218 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:41 crc kubenswrapper[4809]: I0930 01:27:41.298315 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerStarted","Data":"0f69f75919caae2000882b14020744020f9c86ad7a1b95b77a4bcba908d406d8"} Sep 30 01:27:41 crc kubenswrapper[4809]: I0930 01:27:41.714148 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="207213cd-fc29-4597-936a-e8f332106ee7" path="/var/lib/kubelet/pods/207213cd-fc29-4597-936a-e8f332106ee7/volumes" Sep 30 01:27:42 crc kubenswrapper[4809]: I0930 01:27:42.311158 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerStarted","Data":"3662074eeb1b673bcc855078f400db09a283e38dd73474bd3e617722896d8363"} Sep 30 01:27:43 crc kubenswrapper[4809]: I0930 01:27:43.327101 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerStarted","Data":"7d14c72576cb25e175aea8510107d8a7d79728c188b207b9fe804ca5711bbb0c"} Sep 30 01:27:43 crc kubenswrapper[4809]: I0930 01:27:43.327582 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerStarted","Data":"39c58e0a487626bcd65e1751b4fb9c47efe42d90f30020e3a63293c50dd01598"} Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.352314 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerStarted","Data":"8c6266593b1d996914edc861db81d423a89eb22078199d726d6ad3fe21ea23ed"} Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.352775 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-central-agent" containerID="cri-o://3662074eeb1b673bcc855078f400db09a283e38dd73474bd3e617722896d8363" gracePeriod=30 Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.353273 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-notification-agent" containerID="cri-o://39c58e0a487626bcd65e1751b4fb9c47efe42d90f30020e3a63293c50dd01598" gracePeriod=30 Sep 
30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.353302 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="proxy-httpd" containerID="cri-o://8c6266593b1d996914edc861db81d423a89eb22078199d726d6ad3fe21ea23ed" gracePeriod=30 Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.353273 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="sg-core" containerID="cri-o://7d14c72576cb25e175aea8510107d8a7d79728c188b207b9fe804ca5711bbb0c" gracePeriod=30 Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.353550 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.392135 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.6254983320000003 podStartE2EDuration="7.392101145s" podCreationTimestamp="2025-09-30 01:27:38 +0000 UTC" firstStartedPulling="2025-09-30 01:27:40.57173614 +0000 UTC m=+4711.607985558" lastFinishedPulling="2025-09-30 01:27:44.338338953 +0000 UTC m=+4715.374588371" observedRunningTime="2025-09-30 01:27:45.378969963 +0000 UTC m=+4716.415219371" watchObservedRunningTime="2025-09-30 01:27:45.392101145 +0000 UTC m=+4716.428350553" Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.824524 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64c7465c7b-5tvxt" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.66:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.66:8443: connect: connection refused" Sep 30 01:27:45 crc kubenswrapper[4809]: I0930 01:27:45.824665 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:27:46 crc kubenswrapper[4809]: I0930 01:27:46.364042 4809 generic.go:334] "Generic (PLEG): container finished" podID="38228c3a-dc1c-4435-b592-5514407bd351" containerID="8c6266593b1d996914edc861db81d423a89eb22078199d726d6ad3fe21ea23ed" exitCode=0 Sep 30 01:27:46 crc kubenswrapper[4809]: I0930 01:27:46.364287 4809 generic.go:334] "Generic (PLEG): container finished" podID="38228c3a-dc1c-4435-b592-5514407bd351" containerID="7d14c72576cb25e175aea8510107d8a7d79728c188b207b9fe804ca5711bbb0c" exitCode=2 Sep 30 01:27:46 crc kubenswrapper[4809]: I0930 01:27:46.364296 4809 generic.go:334] "Generic (PLEG): container finished" podID="38228c3a-dc1c-4435-b592-5514407bd351" containerID="39c58e0a487626bcd65e1751b4fb9c47efe42d90f30020e3a63293c50dd01598" exitCode=0 Sep 30 01:27:46 crc kubenswrapper[4809]: I0930 01:27:46.364119 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerDied","Data":"8c6266593b1d996914edc861db81d423a89eb22078199d726d6ad3fe21ea23ed"} Sep 30 01:27:46 crc kubenswrapper[4809]: I0930 01:27:46.364322 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerDied","Data":"7d14c72576cb25e175aea8510107d8a7d79728c188b207b9fe804ca5711bbb0c"} Sep 30 01:27:46 crc kubenswrapper[4809]: I0930 01:27:46.364333 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerDied","Data":"39c58e0a487626bcd65e1751b4fb9c47efe42d90f30020e3a63293c50dd01598"} Sep 30 01:27:47 crc kubenswrapper[4809]: I0930 01:27:47.669968 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.396706 4809 generic.go:334] "Generic (PLEG): container finished" podID="38228c3a-dc1c-4435-b592-5514407bd351" containerID="3662074eeb1b673bcc855078f400db09a283e38dd73474bd3e617722896d8363" exitCode=0 Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.396749 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerDied","Data":"3662074eeb1b673bcc855078f400db09a283e38dd73474bd3e617722896d8363"} Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.847251 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868102 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-combined-ca-bundle\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868343 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-run-httpd\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868462 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-sg-core-conf-yaml\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868511 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5m2q\" (UniqueName: \"kubernetes.io/projected/38228c3a-dc1c-4435-b592-5514407bd351-kube-api-access-n5m2q\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868535 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-scripts\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868555 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-log-httpd\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.868585 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-ceilometer-tls-certs\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc 
kubenswrapper[4809]: I0930 01:27:48.868603 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-config-data\") pod \"38228c3a-dc1c-4435-b592-5514407bd351\" (UID: \"38228c3a-dc1c-4435-b592-5514407bd351\") " Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.871146 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.873116 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.878946 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38228c3a-dc1c-4435-b592-5514407bd351-kube-api-access-n5m2q" (OuterVolumeSpecName: "kube-api-access-n5m2q") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "kube-api-access-n5m2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.880904 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-scripts" (OuterVolumeSpecName: "scripts") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.911966 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.946371 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.971165 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5m2q\" (UniqueName: \"kubernetes.io/projected/38228c3a-dc1c-4435-b592-5514407bd351-kube-api-access-n5m2q\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.971207 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.971218 4809 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.971227 4809 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.971236 4809 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38228c3a-dc1c-4435-b592-5514407bd351-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:48 crc kubenswrapper[4809]: I0930 01:27:48.971244 4809 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.008240 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.010358 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-config-data" (OuterVolumeSpecName: "config-data") pod "38228c3a-dc1c-4435-b592-5514407bd351" (UID: "38228c3a-dc1c-4435-b592-5514407bd351"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.072874 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.072905 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38228c3a-dc1c-4435-b592-5514407bd351-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.236630 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.334720 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.408782 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38228c3a-dc1c-4435-b592-5514407bd351","Type":"ContainerDied","Data":"0f69f75919caae2000882b14020744020f9c86ad7a1b95b77a4bcba908d406d8"} Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.408829 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.408858 4809 scope.go:117] "RemoveContainer" containerID="8c6266593b1d996914edc861db81d423a89eb22078199d726d6ad3fe21ea23ed" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.408939 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="manila-share" containerID="cri-o://3adb634454ae941062fa1254b37b6a1991ef209a62e8496746271d7ea43508a0" gracePeriod=30 Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.411347 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="probe" containerID="cri-o://c88818285b0b04aba003107a6341cdab2835254c5f3431a292d4d79fc917ff3b" gracePeriod=30 Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.489956 4809 scope.go:117] "RemoveContainer" containerID="7d14c72576cb25e175aea8510107d8a7d79728c188b207b9fe804ca5711bbb0c" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.493462 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.504352 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.520409 4809 scope.go:117] "RemoveContainer" containerID="39c58e0a487626bcd65e1751b4fb9c47efe42d90f30020e3a63293c50dd01598" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.526548 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:49 crc kubenswrapper[4809]: E0930 01:27:49.527075 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207213cd-fc29-4597-936a-e8f332106ee7" containerName="init" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527093 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="207213cd-fc29-4597-936a-e8f332106ee7" containerName="init" Sep 30 01:27:49 crc kubenswrapper[4809]: E0930 01:27:49.527131 4809 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="sg-core" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527137 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="sg-core" Sep 30 01:27:49 crc kubenswrapper[4809]: E0930 01:27:49.527155 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207213cd-fc29-4597-936a-e8f332106ee7" containerName="dnsmasq-dns" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527161 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="207213cd-fc29-4597-936a-e8f332106ee7" containerName="dnsmasq-dns" Sep 30 01:27:49 crc kubenswrapper[4809]: E0930 01:27:49.527173 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="proxy-httpd" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527179 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="proxy-httpd" Sep 30 01:27:49 crc kubenswrapper[4809]: E0930 01:27:49.527191 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-central-agent" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527197 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-central-agent" Sep 30 01:27:49 crc kubenswrapper[4809]: E0930 01:27:49.527214 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-notification-agent" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527220 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-notification-agent" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527419 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-notification-agent" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527437 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="sg-core" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527452 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="proxy-httpd" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527466 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="38228c3a-dc1c-4435-b592-5514407bd351" containerName="ceilometer-central-agent" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.527472 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="207213cd-fc29-4597-936a-e8f332106ee7" containerName="dnsmasq-dns" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.529792 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.532306 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.533942 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.534182 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.539956 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.543472 4809 scope.go:117] "RemoveContainer" containerID="3662074eeb1b673bcc855078f400db09a283e38dd73474bd3e617722896d8363" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.567053 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.594598 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-config-data\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.594670 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-scripts\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.595595 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/da43457d-28e7-451d-a937-b7d51e56da99-run-httpd\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.595743 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg9st\" (UniqueName: \"kubernetes.io/projected/da43457d-28e7-451d-a937-b7d51e56da99-kube-api-access-tg9st\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.595816 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.595963 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.596004 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/da43457d-28e7-451d-a937-b7d51e56da99-log-httpd\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.596028 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.634674 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698126 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/da43457d-28e7-451d-a937-b7d51e56da99-run-httpd\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698179 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg9st\" (UniqueName: \"kubernetes.io/projected/da43457d-28e7-451d-a937-b7d51e56da99-kube-api-access-tg9st\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698215 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698290 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698319 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/da43457d-28e7-451d-a937-b7d51e56da99-log-httpd\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698338 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698379 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-config-data\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698401 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-scripts\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" 
Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698795 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/da43457d-28e7-451d-a937-b7d51e56da99-run-httpd\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.698939 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/da43457d-28e7-451d-a937-b7d51e56da99-log-httpd\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.703628 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-config-data\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.704244 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.704738 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-scripts\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.704894 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.716927 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38228c3a-dc1c-4435-b592-5514407bd351" path="/var/lib/kubelet/pods/38228c3a-dc1c-4435-b592-5514407bd351/volumes" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.718327 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/da43457d-28e7-451d-a937-b7d51e56da99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.720553 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg9st\" (UniqueName: \"kubernetes.io/projected/da43457d-28e7-451d-a937-b7d51e56da99-kube-api-access-tg9st\") pod \"ceilometer-0\" (UID: \"da43457d-28e7-451d-a937-b7d51e56da99\") " pod="openstack/ceilometer-0" Sep 30 01:27:49 crc kubenswrapper[4809]: I0930 01:27:49.900895 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.463944 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c58816c-9134-4995-aaa0-6703afaf058c" containerID="c88818285b0b04aba003107a6341cdab2835254c5f3431a292d4d79fc917ff3b" exitCode=0 Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.464388 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9c58816c-9134-4995-aaa0-6703afaf058c","Type":"ContainerDied","Data":"c88818285b0b04aba003107a6341cdab2835254c5f3431a292d4d79fc917ff3b"} Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.464467 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9c58816c-9134-4995-aaa0-6703afaf058c","Type":"ContainerDied","Data":"3adb634454ae941062fa1254b37b6a1991ef209a62e8496746271d7ea43508a0"} Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.464449 4809 generic.go:334] "Generic (PLEG): container finished" podID="9c58816c-9134-4995-aaa0-6703afaf058c" containerID="3adb634454ae941062fa1254b37b6a1991ef209a62e8496746271d7ea43508a0" exitCode=1 Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.480380 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="manila-scheduler" containerID="cri-o://b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6" gracePeriod=30 Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.480528 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="probe" containerID="cri-o://f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2" gracePeriod=30 Sep 30 01:27:50 crc kubenswrapper[4809]: I0930 01:27:50.550113 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.148323 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240508 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240598 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-combined-ca-bundle\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240701 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7dhn\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-kube-api-access-z7dhn\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240809 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-var-lib-manila\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240865 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-etc-machine-id\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240922 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-scripts\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240954 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-ceph\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.240985 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data-custom\") pod \"9c58816c-9134-4995-aaa0-6703afaf058c\" (UID: \"9c58816c-9134-4995-aaa0-6703afaf058c\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.241443 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "var-lib-manila". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.243841 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.255925 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-kube-api-access-z7dhn" (OuterVolumeSpecName: "kube-api-access-z7dhn") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "kube-api-access-z7dhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.256439 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-scripts" (OuterVolumeSpecName: "scripts") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.256844 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-ceph" (OuterVolumeSpecName: "ceph") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.256982 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.341306 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343261 4809 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-var-lib-manila\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343288 4809 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c58816c-9134-4995-aaa0-6703afaf058c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343298 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343306 4809 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343315 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343324 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.343333 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7dhn\" (UniqueName: \"kubernetes.io/projected/9c58816c-9134-4995-aaa0-6703afaf058c-kube-api-access-z7dhn\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.345801 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.399625 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data" (OuterVolumeSpecName: "config-data") pod "9c58816c-9134-4995-aaa0-6703afaf058c" (UID: "9c58816c-9134-4995-aaa0-6703afaf058c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444481 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw6dx\" (UniqueName: \"kubernetes.io/projected/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-kube-api-access-zw6dx\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444554 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-combined-ca-bundle\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444592 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-secret-key\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444653 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-scripts\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444724 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-logs\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444787 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-config-data\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.444846 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-tls-certs\") pod \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\" (UID: \"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6\") " Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.445257 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c58816c-9134-4995-aaa0-6703afaf058c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.447712 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-logs" (OuterVolumeSpecName: "logs") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.447983 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-kube-api-access-zw6dx" (OuterVolumeSpecName: "kube-api-access-zw6dx") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). 
InnerVolumeSpecName "kube-api-access-zw6dx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.450977 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.474681 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.475936 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-scripts" (OuterVolumeSpecName: "scripts") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.477818 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-config-data" (OuterVolumeSpecName: "config-data") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.494840 4809 generic.go:334] "Generic (PLEG): container finished" podID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerID="f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2" exitCode=0 Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.494910 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f0bb1ff8-331b-444d-a312-0e4eb87e39f5","Type":"ContainerDied","Data":"f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2"} Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.497316 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"da43457d-28e7-451d-a937-b7d51e56da99","Type":"ContainerStarted","Data":"4b23c1c197099e005c9fe0dcbcb3f6c4cbb7846e396aeb730c994886951e1ed0"} Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.497360 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"da43457d-28e7-451d-a937-b7d51e56da99","Type":"ContainerStarted","Data":"12ce4a6b49bf1b1dfb3caec89a9bc39d60d6229355774b7b451cbfc682037b18"} Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.500548 4809 generic.go:334] "Generic (PLEG): container finished" podID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerID="05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef" exitCode=137 Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.500599 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c7465c7b-5tvxt" event={"ID":"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6","Type":"ContainerDied","Data":"05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef"} Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.500619 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c7465c7b-5tvxt" event={"ID":"9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6","Type":"ContainerDied","Data":"818e18ea01df8668112cb6d235b8151cfdd84d32e68299cb4cbbcbf0633a0061"} Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.500635 4809 scope.go:117] "RemoveContainer" containerID="24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.500655 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64c7465c7b-5tvxt" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.506380 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9c58816c-9134-4995-aaa0-6703afaf058c","Type":"ContainerDied","Data":"766f269fb184137b25cf31fc367bfd26270a11ced5dbd236bd1db4255d242b8d"} Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.506470 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.513737 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" (UID: "9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551490 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551835 4809 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551844 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551853 4809 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551863 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw6dx\" (UniqueName: \"kubernetes.io/projected/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-kube-api-access-zw6dx\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551876 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.551886 4809 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.566800 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.594062 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.603795 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:51 crc kubenswrapper[4809]: E0930 01:27:51.604216 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="probe" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604228 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="probe" Sep 30 01:27:51 crc kubenswrapper[4809]: E0930 01:27:51.604247 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon-log" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604331 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon-log" Sep 30 01:27:51 crc kubenswrapper[4809]: E0930 01:27:51.604364 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="manila-share" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604374 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="manila-share" Sep 30 
01:27:51 crc kubenswrapper[4809]: E0930 01:27:51.604410 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604415 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604911 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="manila-share" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604952 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon-log" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604969 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" containerName="probe" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.604979 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" containerName="horizon" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.606182 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.615895 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.617811 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.654083 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55dpf\" (UniqueName: \"kubernetes.io/projected/f2c82170-5802-4ca9-a749-84618db7b0b5-kube-api-access-55dpf\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.654121 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f2c82170-5802-4ca9-a749-84618db7b0b5-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.654221 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.655056 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-scripts\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.655099 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2c82170-5802-4ca9-a749-84618db7b0b5-etc-machine-id\") pod \"manila-share-share1-0\" 
(UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.655131 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2c82170-5802-4ca9-a749-84618db7b0b5-ceph\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.655153 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.655176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-config-data\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.708209 4809 scope.go:117] "RemoveContainer" containerID="05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.710555 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c58816c-9134-4995-aaa0-6703afaf058c" path="/var/lib/kubelet/pods/9c58816c-9134-4995-aaa0-6703afaf058c/volumes" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.758797 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-scripts\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.758857 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2c82170-5802-4ca9-a749-84618db7b0b5-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.758887 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2c82170-5802-4ca9-a749-84618db7b0b5-ceph\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.758914 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.758937 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-config-data\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: 
I0930 01:27:51.759011 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55dpf\" (UniqueName: \"kubernetes.io/projected/f2c82170-5802-4ca9-a749-84618db7b0b5-kube-api-access-55dpf\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.759028 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f2c82170-5802-4ca9-a749-84618db7b0b5-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.759079 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.760820 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f2c82170-5802-4ca9-a749-84618db7b0b5-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.760907 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2c82170-5802-4ca9-a749-84618db7b0b5-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.761795 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-scripts\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.764885 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.766425 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-config-data\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.767141 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2c82170-5802-4ca9-a749-84618db7b0b5-ceph\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.773664 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2c82170-5802-4ca9-a749-84618db7b0b5-config-data-custom\") pod \"manila-share-share1-0\" (UID: 
\"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.781243 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55dpf\" (UniqueName: \"kubernetes.io/projected/f2c82170-5802-4ca9-a749-84618db7b0b5-kube-api-access-55dpf\") pod \"manila-share-share1-0\" (UID: \"f2c82170-5802-4ca9-a749-84618db7b0b5\") " pod="openstack/manila-share-share1-0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.851972 4809 scope.go:117] "RemoveContainer" containerID="24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5" Sep 30 01:27:51 crc kubenswrapper[4809]: E0930 01:27:51.853333 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5\": container with ID starting with 24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5 not found: ID does not exist" containerID="24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.853378 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5"} err="failed to get container status \"24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5\": rpc error: code = NotFound desc = could not find container \"24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5\": container with ID starting with 24ccdff97e5103c902dd96e090324c8fb2e583c7ee9b8f2dc6502c2520ff71a5 not found: ID does not exist" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.853403 4809 scope.go:117] "RemoveContainer" containerID="05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef" Sep 30 01:27:51 crc kubenswrapper[4809]: E0930 01:27:51.853859 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef\": container with ID starting with 05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef not found: ID does not exist" containerID="05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.853901 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef"} err="failed to get container status \"05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef\": rpc error: code = NotFound desc = could not find container \"05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef\": container with ID starting with 05040e066a6d39d4dfee1a460b39966b674834bf20e71951ad0508f8106b35ef not found: ID does not exist" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.853924 4809 scope.go:117] "RemoveContainer" containerID="c88818285b0b04aba003107a6341cdab2835254c5f3431a292d4d79fc917ff3b" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.874804 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64c7465c7b-5tvxt"] Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.886309 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-64c7465c7b-5tvxt"] Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.888560 4809 scope.go:117] "RemoveContainer" 
containerID="3adb634454ae941062fa1254b37b6a1991ef209a62e8496746271d7ea43508a0" Sep 30 01:27:51 crc kubenswrapper[4809]: I0930 01:27:51.970526 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 01:27:52 crc kubenswrapper[4809]: I0930 01:27:52.532322 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"da43457d-28e7-451d-a937-b7d51e56da99","Type":"ContainerStarted","Data":"789a7d9e3eea1a51fee2b1576c7e6887751044e6cc0ed2e1c66715615cf4c6d8"} Sep 30 01:27:52 crc kubenswrapper[4809]: I0930 01:27:52.664775 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 01:27:53 crc kubenswrapper[4809]: I0930 01:27:53.557154 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"da43457d-28e7-451d-a937-b7d51e56da99","Type":"ContainerStarted","Data":"18894f3e1184b2145e17bb2ecc7033ce3e8db09fb027d6e2839fb0187f064911"} Sep 30 01:27:53 crc kubenswrapper[4809]: I0930 01:27:53.562114 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f2c82170-5802-4ca9-a749-84618db7b0b5","Type":"ContainerStarted","Data":"4a2b046e8309607561b863f954ee4e29c2c29d7c9a2b6432d7e66742a51e66b0"} Sep 30 01:27:53 crc kubenswrapper[4809]: I0930 01:27:53.727560 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6" path="/var/lib/kubelet/pods/9e3618c6-e9f9-47c0-854d-dcdfc3b0cdc6/volumes" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.327779 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.425895 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-etc-machine-id\") pod \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.425972 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-combined-ca-bundle\") pod \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.426092 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data\") pod \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.426170 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fflnv\" (UniqueName: \"kubernetes.io/projected/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-kube-api-access-fflnv\") pod \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.426207 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data-custom\") pod \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " Sep 30 01:27:54 crc 
kubenswrapper[4809]: I0930 01:27:54.426235 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-scripts\") pod \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\" (UID: \"f0bb1ff8-331b-444d-a312-0e4eb87e39f5\") " Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.427008 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f0bb1ff8-331b-444d-a312-0e4eb87e39f5" (UID: "f0bb1ff8-331b-444d-a312-0e4eb87e39f5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.432118 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-kube-api-access-fflnv" (OuterVolumeSpecName: "kube-api-access-fflnv") pod "f0bb1ff8-331b-444d-a312-0e4eb87e39f5" (UID: "f0bb1ff8-331b-444d-a312-0e4eb87e39f5"). InnerVolumeSpecName "kube-api-access-fflnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.450752 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f0bb1ff8-331b-444d-a312-0e4eb87e39f5" (UID: "f0bb1ff8-331b-444d-a312-0e4eb87e39f5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.453804 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-scripts" (OuterVolumeSpecName: "scripts") pod "f0bb1ff8-331b-444d-a312-0e4eb87e39f5" (UID: "f0bb1ff8-331b-444d-a312-0e4eb87e39f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.502710 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0bb1ff8-331b-444d-a312-0e4eb87e39f5" (UID: "f0bb1ff8-331b-444d-a312-0e4eb87e39f5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.529152 4809 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.529185 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.529196 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fflnv\" (UniqueName: \"kubernetes.io/projected/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-kube-api-access-fflnv\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.529206 4809 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.529214 4809 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.562296 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data" (OuterVolumeSpecName: "config-data") pod "f0bb1ff8-331b-444d-a312-0e4eb87e39f5" (UID: "f0bb1ff8-331b-444d-a312-0e4eb87e39f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.581819 4809 generic.go:334] "Generic (PLEG): container finished" podID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerID="b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6" exitCode=0 Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.581873 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.581904 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f0bb1ff8-331b-444d-a312-0e4eb87e39f5","Type":"ContainerDied","Data":"b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6"} Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.581933 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f0bb1ff8-331b-444d-a312-0e4eb87e39f5","Type":"ContainerDied","Data":"4ccfd4daeaf76cfd0e76e26fef9c3d604d127e405fa92015b0c073b52d23209c"} Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.581949 4809 scope.go:117] "RemoveContainer" containerID="f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.587263 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f2c82170-5802-4ca9-a749-84618db7b0b5","Type":"ContainerStarted","Data":"cfaf0e9c636e4b38c1be3a3eff69b5fdeeeb85c82513d8dbd02a412bb211a52f"} Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.587364 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f2c82170-5802-4ca9-a749-84618db7b0b5","Type":"ContainerStarted","Data":"a316d8c582ae664e7b019de612cdbcfbdd5413584ee91d893b4394dbd99216ca"} Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.608841 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.608819092 podStartE2EDuration="3.608819092s" podCreationTimestamp="2025-09-30 01:27:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:27:54.606107657 +0000 UTC m=+4725.642357065" watchObservedRunningTime="2025-09-30 01:27:54.608819092 +0000 UTC m=+4725.645068500" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.619679 4809 scope.go:117] "RemoveContainer" containerID="b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.631992 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0bb1ff8-331b-444d-a312-0e4eb87e39f5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.644276 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.677766 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.683530 4809 scope.go:117] "RemoveContainer" containerID="f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2" Sep 30 01:27:54 crc kubenswrapper[4809]: E0930 01:27:54.684068 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2\": container with ID starting with f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2 not found: ID does not exist" containerID="f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.684095 4809 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2"} err="failed to get container status \"f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2\": rpc error: code = NotFound desc = could not find container \"f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2\": container with ID starting with f6a5cd1237e386da1e17b4c2a50dcf91730017d496b9fe8238afbf56cfa324e2 not found: ID does not exist" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.684114 4809 scope.go:117] "RemoveContainer" containerID="b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6" Sep 30 01:27:54 crc kubenswrapper[4809]: E0930 01:27:54.684605 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6\": container with ID starting with b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6 not found: ID does not exist" containerID="b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.684627 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6"} err="failed to get container status \"b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6\": rpc error: code = NotFound desc = could not find container \"b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6\": container with ID starting with b7af432a4fb18dc4e5c76e0fef8a14c487f6703bef114e572523cb425ab45be6 not found: ID does not exist" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.702907 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:54 crc kubenswrapper[4809]: E0930 01:27:54.703415 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="manila-scheduler" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.703427 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="manila-scheduler" Sep 30 01:27:54 crc kubenswrapper[4809]: E0930 01:27:54.703464 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="probe" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.703471 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="probe" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.703762 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="probe" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.703780 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" containerName="manila-scheduler" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.705153 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.711166 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.731694 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.837541 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.837580 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.837606 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad1bd72-2d1a-4de5-891c-573971953aa1-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.837664 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-scripts\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.837681 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-config-data\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.837707 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4lrv\" (UniqueName: \"kubernetes.io/projected/8ad1bd72-2d1a-4de5-891c-573971953aa1-kube-api-access-f4lrv\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940017 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940339 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940367 4809 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad1bd72-2d1a-4de5-891c-573971953aa1-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940409 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-scripts\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940427 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-config-data\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940462 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4lrv\" (UniqueName: \"kubernetes.io/projected/8ad1bd72-2d1a-4de5-891c-573971953aa1-kube-api-access-f4lrv\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.940730 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad1bd72-2d1a-4de5-891c-573971953aa1-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.944451 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.945745 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-config-data\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.947463 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.949971 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad1bd72-2d1a-4de5-891c-573971953aa1-scripts\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:54 crc kubenswrapper[4809]: I0930 01:27:54.959528 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4lrv\" (UniqueName: \"kubernetes.io/projected/8ad1bd72-2d1a-4de5-891c-573971953aa1-kube-api-access-f4lrv\") pod \"manila-scheduler-0\" (UID: \"8ad1bd72-2d1a-4de5-891c-573971953aa1\") " pod="openstack/manila-scheduler-0" Sep 30 01:27:55 crc 
kubenswrapper[4809]: I0930 01:27:55.051539 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Sep 30 01:27:55 crc kubenswrapper[4809]: I0930 01:27:55.057409 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 01:27:55 crc kubenswrapper[4809]: I0930 01:27:55.594960 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 01:27:55 crc kubenswrapper[4809]: I0930 01:27:55.628994 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"da43457d-28e7-451d-a937-b7d51e56da99","Type":"ContainerStarted","Data":"27a1391d8cb975df933e37be4ad56bb394e0c09b93098e3cd96e972ccf8a900b"} Sep 30 01:27:55 crc kubenswrapper[4809]: I0930 01:27:55.629518 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 01:27:55 crc kubenswrapper[4809]: I0930 01:27:55.703583 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0bb1ff8-331b-444d-a312-0e4eb87e39f5" path="/var/lib/kubelet/pods/f0bb1ff8-331b-444d-a312-0e4eb87e39f5/volumes" Sep 30 01:27:56 crc kubenswrapper[4809]: I0930 01:27:56.643583 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"8ad1bd72-2d1a-4de5-891c-573971953aa1","Type":"ContainerStarted","Data":"b6fd2a2261329081cc793c61372356cba70656b15e1611f04d3910817431451b"} Sep 30 01:27:56 crc kubenswrapper[4809]: I0930 01:27:56.644202 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"8ad1bd72-2d1a-4de5-891c-573971953aa1","Type":"ContainerStarted","Data":"dc3d246fbfd0cf62e4a8d15a924cf6f337d9b14e17d6e0f2ceef9de953db3934"} Sep 30 01:27:56 crc kubenswrapper[4809]: I0930 01:27:56.644218 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"8ad1bd72-2d1a-4de5-891c-573971953aa1","Type":"ContainerStarted","Data":"3ba9833a505df5f22d2bd89308d3d7e6da174deb3d0ffbe061f4a39c0c169620"} Sep 30 01:27:56 crc kubenswrapper[4809]: I0930 01:27:56.661841 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.661819576 podStartE2EDuration="2.661819576s" podCreationTimestamp="2025-09-30 01:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:27:56.658371931 +0000 UTC m=+4727.694621339" watchObservedRunningTime="2025-09-30 01:27:56.661819576 +0000 UTC m=+4727.698068984" Sep 30 01:27:56 crc kubenswrapper[4809]: I0930 01:27:56.663988 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.634369079 podStartE2EDuration="7.663981035s" podCreationTimestamp="2025-09-30 01:27:49 +0000 UTC" firstStartedPulling="2025-09-30 01:27:50.722825165 +0000 UTC m=+4721.759074573" lastFinishedPulling="2025-09-30 01:27:54.752437121 +0000 UTC m=+4725.788686529" observedRunningTime="2025-09-30 01:27:55.665314609 +0000 UTC m=+4726.701564007" watchObservedRunningTime="2025-09-30 01:27:56.663981035 +0000 UTC m=+4727.700230443" Sep 30 01:28:01 crc kubenswrapper[4809]: I0930 01:28:01.971617 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Sep 30 01:28:05 crc kubenswrapper[4809]: I0930 01:28:05.057903 4809 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Sep 30 01:28:13 crc kubenswrapper[4809]: I0930 01:28:13.490698 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Sep 30 01:28:16 crc kubenswrapper[4809]: I0930 01:28:16.605328 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Sep 30 01:28:18 crc kubenswrapper[4809]: I0930 01:28:18.344955 4809 scope.go:117] "RemoveContainer" containerID="7c02d1996d01e573adbf429c70debdb892204128d1239b0286342b8a6ee46524" Sep 30 01:28:18 crc kubenswrapper[4809]: I0930 01:28:18.396899 4809 scope.go:117] "RemoveContainer" containerID="64f265b9f3640b0bf65dd37eccf84f09e55dac5818dd9b8ed55f62ebe4a80b11" Sep 30 01:28:19 crc kubenswrapper[4809]: I0930 01:28:19.925374 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 01:28:25 crc kubenswrapper[4809]: I0930 01:28:25.324923 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:28:25 crc kubenswrapper[4809]: I0930 01:28:25.325958 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.120766 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cf5vr"] Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.125036 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.134402 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cf5vr"] Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.330804 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t98jw\" (UniqueName: \"kubernetes.io/projected/ef750905-629d-47f7-80d2-b7dba63db5f3-kube-api-access-t98jw\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.330852 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-utilities\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.331032 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-catalog-content\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.433027 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t98jw\" (UniqueName: \"kubernetes.io/projected/ef750905-629d-47f7-80d2-b7dba63db5f3-kube-api-access-t98jw\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.433075 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-utilities\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.433154 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-catalog-content\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.433745 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-catalog-content\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.433878 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-utilities\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.454197 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t98jw\" (UniqueName: \"kubernetes.io/projected/ef750905-629d-47f7-80d2-b7dba63db5f3-kube-api-access-t98jw\") pod \"community-operators-cf5vr\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:42 crc kubenswrapper[4809]: I0930 01:28:42.500058 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:43 crc kubenswrapper[4809]: I0930 01:28:43.094206 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cf5vr"] Sep 30 01:28:44 crc kubenswrapper[4809]: I0930 01:28:44.268896 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerID="bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42" exitCode=0 Sep 30 01:28:44 crc kubenswrapper[4809]: I0930 01:28:44.268998 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerDied","Data":"bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42"} Sep 30 01:28:44 crc kubenswrapper[4809]: I0930 01:28:44.269216 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerStarted","Data":"537f80769fbb957b85c666048b18f3fead390eaaf769565ed66623c220d5f90e"} Sep 30 01:28:46 crc kubenswrapper[4809]: I0930 01:28:46.292803 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerStarted","Data":"fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b"} Sep 30 01:28:47 crc kubenswrapper[4809]: I0930 01:28:47.309347 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerID="fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b" exitCode=0 Sep 30 01:28:47 crc kubenswrapper[4809]: I0930 01:28:47.309420 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerDied","Data":"fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b"} Sep 30 01:28:48 crc kubenswrapper[4809]: I0930 01:28:48.320438 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerStarted","Data":"0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611"} Sep 30 01:28:48 crc kubenswrapper[4809]: I0930 01:28:48.351835 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cf5vr" podStartSLOduration=2.86841275 podStartE2EDuration="6.351809094s" podCreationTimestamp="2025-09-30 01:28:42 +0000 UTC" firstStartedPulling="2025-09-30 01:28:44.270613569 +0000 UTC m=+4775.306862977" lastFinishedPulling="2025-09-30 01:28:47.754009883 +0000 UTC m=+4778.790259321" observedRunningTime="2025-09-30 01:28:48.338185839 +0000 UTC m=+4779.374435267" watchObservedRunningTime="2025-09-30 01:28:48.351809094 +0000 UTC m=+4779.388058512" Sep 30 01:28:52 crc kubenswrapper[4809]: I0930 01:28:52.501972 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:52 crc kubenswrapper[4809]: I0930 01:28:52.502580 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:28:53 crc kubenswrapper[4809]: I0930 01:28:53.611682 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-cf5vr" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="registry-server" probeResult="failure" output=< Sep 30 01:28:53 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:28:53 crc kubenswrapper[4809]: > Sep 30 01:28:55 crc kubenswrapper[4809]: I0930 01:28:55.325135 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:28:55 crc kubenswrapper[4809]: I0930 01:28:55.325583 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:29:02 crc kubenswrapper[4809]: I0930 01:29:02.560099 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:29:02 crc kubenswrapper[4809]: I0930 01:29:02.635802 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:29:02 crc kubenswrapper[4809]: I0930 01:29:02.805139 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cf5vr"] Sep 30 01:29:04 crc kubenswrapper[4809]: I0930 01:29:04.506628 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cf5vr" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="registry-server" containerID="cri-o://0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611" gracePeriod=2 Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.193024 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.312185 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t98jw\" (UniqueName: \"kubernetes.io/projected/ef750905-629d-47f7-80d2-b7dba63db5f3-kube-api-access-t98jw\") pod \"ef750905-629d-47f7-80d2-b7dba63db5f3\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.312256 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-utilities\") pod \"ef750905-629d-47f7-80d2-b7dba63db5f3\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.312278 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-catalog-content\") pod \"ef750905-629d-47f7-80d2-b7dba63db5f3\" (UID: \"ef750905-629d-47f7-80d2-b7dba63db5f3\") " Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.315333 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-utilities" (OuterVolumeSpecName: "utilities") pod "ef750905-629d-47f7-80d2-b7dba63db5f3" (UID: "ef750905-629d-47f7-80d2-b7dba63db5f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.319991 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef750905-629d-47f7-80d2-b7dba63db5f3-kube-api-access-t98jw" (OuterVolumeSpecName: "kube-api-access-t98jw") pod "ef750905-629d-47f7-80d2-b7dba63db5f3" (UID: "ef750905-629d-47f7-80d2-b7dba63db5f3"). InnerVolumeSpecName "kube-api-access-t98jw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.369282 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ef750905-629d-47f7-80d2-b7dba63db5f3" (UID: "ef750905-629d-47f7-80d2-b7dba63db5f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.415376 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t98jw\" (UniqueName: \"kubernetes.io/projected/ef750905-629d-47f7-80d2-b7dba63db5f3-kube-api-access-t98jw\") on node \"crc\" DevicePath \"\"" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.415408 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.415421 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef750905-629d-47f7-80d2-b7dba63db5f3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.517924 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerID="0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611" exitCode=0 Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.517960 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cf5vr" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.517980 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerDied","Data":"0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611"} Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.518328 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf5vr" event={"ID":"ef750905-629d-47f7-80d2-b7dba63db5f3","Type":"ContainerDied","Data":"537f80769fbb957b85c666048b18f3fead390eaaf769565ed66623c220d5f90e"} Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.518347 4809 scope.go:117] "RemoveContainer" containerID="0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.555639 4809 scope.go:117] "RemoveContainer" containerID="fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.560435 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cf5vr"] Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.574058 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cf5vr"] Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.579609 4809 scope.go:117] "RemoveContainer" containerID="bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.631575 4809 scope.go:117] "RemoveContainer" containerID="0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611" Sep 30 01:29:05 crc kubenswrapper[4809]: E0930 01:29:05.632095 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611\": container with ID starting with 0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611 not found: ID does not exist" containerID="0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.632132 
4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611"} err="failed to get container status \"0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611\": rpc error: code = NotFound desc = could not find container \"0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611\": container with ID starting with 0843993a2dd4db72fcdc7929077408bf6c8cd5512855c1983c55038a69edc611 not found: ID does not exist" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.632159 4809 scope.go:117] "RemoveContainer" containerID="fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b" Sep 30 01:29:05 crc kubenswrapper[4809]: E0930 01:29:05.632435 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b\": container with ID starting with fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b not found: ID does not exist" containerID="fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.632462 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b"} err="failed to get container status \"fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b\": rpc error: code = NotFound desc = could not find container \"fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b\": container with ID starting with fafaf3a8b0433b42dd36e6d8d8acc403cef02591abc081a5a3ed6927bb27330b not found: ID does not exist" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.632479 4809 scope.go:117] "RemoveContainer" containerID="bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42" Sep 30 01:29:05 crc kubenswrapper[4809]: E0930 01:29:05.632892 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42\": container with ID starting with bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42 not found: ID does not exist" containerID="bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.632922 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42"} err="failed to get container status \"bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42\": rpc error: code = NotFound desc = could not find container \"bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42\": container with ID starting with bd69b1cbd622313c42a2905f6de8d68ca56445119c5444d4128e038559910c42 not found: ID does not exist" Sep 30 01:29:05 crc kubenswrapper[4809]: I0930 01:29:05.712227 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" path="/var/lib/kubelet/pods/ef750905-629d-47f7-80d2-b7dba63db5f3/volumes" Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.325325 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.326149 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.326226 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.327267 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"19a1442afdfa18c0d27f18c243884121c78e39e1fdc5c50afe899f2c22a5eea6"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.327370 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://19a1442afdfa18c0d27f18c243884121c78e39e1fdc5c50afe899f2c22a5eea6" gracePeriod=600 Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.744949 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="19a1442afdfa18c0d27f18c243884121c78e39e1fdc5c50afe899f2c22a5eea6" exitCode=0 Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.745157 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"19a1442afdfa18c0d27f18c243884121c78e39e1fdc5c50afe899f2c22a5eea6"} Sep 30 01:29:25 crc kubenswrapper[4809]: I0930 01:29:25.745348 4809 scope.go:117] "RemoveContainer" containerID="2f69dc3e683349d573a251e4fa1a5729f82af0a9efe83d2ccec6270ccd07fbe1" Sep 30 01:29:26 crc kubenswrapper[4809]: I0930 01:29:26.761373 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8"} Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.152582 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf"] Sep 30 01:30:00 crc kubenswrapper[4809]: E0930 01:30:00.153731 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="extract-content" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.153751 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="extract-content" Sep 30 01:30:00 crc kubenswrapper[4809]: E0930 01:30:00.153795 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="extract-utilities" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.153804 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" 
containerName="extract-utilities" Sep 30 01:30:00 crc kubenswrapper[4809]: E0930 01:30:00.153843 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="registry-server" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.153854 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="registry-server" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.154129 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef750905-629d-47f7-80d2-b7dba63db5f3" containerName="registry-server" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.155150 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.158135 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.158210 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.165414 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf"] Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.216093 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-config-volume\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.216190 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-secret-volume\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.216440 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-578bq\" (UniqueName: \"kubernetes.io/projected/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-kube-api-access-578bq\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.319351 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-config-volume\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.319492 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-secret-volume\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.319609 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-578bq\" (UniqueName: \"kubernetes.io/projected/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-kube-api-access-578bq\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.321741 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-config-volume\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.329504 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-secret-volume\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.346302 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-578bq\" (UniqueName: \"kubernetes.io/projected/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-kube-api-access-578bq\") pod \"collect-profiles-29319930-6p4xf\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:00 crc kubenswrapper[4809]: I0930 01:30:00.526418 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:01 crc kubenswrapper[4809]: I0930 01:30:01.045217 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf"] Sep 30 01:30:01 crc kubenswrapper[4809]: I0930 01:30:01.155035 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" event={"ID":"a3eb6865-e0c7-46b7-8eb0-b12656040c1a","Type":"ContainerStarted","Data":"3d74670109b6736b9ad98ddb1e6659afe865e1fb16048592606df69f929a4873"} Sep 30 01:30:02 crc kubenswrapper[4809]: I0930 01:30:02.170326 4809 generic.go:334] "Generic (PLEG): container finished" podID="a3eb6865-e0c7-46b7-8eb0-b12656040c1a" containerID="6bbc2258fd1a487a1cb4a75e560fcbeb59ceb9ae90f9bec666f3acdc9426913f" exitCode=0 Sep 30 01:30:02 crc kubenswrapper[4809]: I0930 01:30:02.170437 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" event={"ID":"a3eb6865-e0c7-46b7-8eb0-b12656040c1a","Type":"ContainerDied","Data":"6bbc2258fd1a487a1cb4a75e560fcbeb59ceb9ae90f9bec666f3acdc9426913f"} Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.656121 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.800825 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-config-volume\") pod \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.800991 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-578bq\" (UniqueName: \"kubernetes.io/projected/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-kube-api-access-578bq\") pod \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.801720 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-config-volume" (OuterVolumeSpecName: "config-volume") pod "a3eb6865-e0c7-46b7-8eb0-b12656040c1a" (UID: "a3eb6865-e0c7-46b7-8eb0-b12656040c1a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.802141 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-secret-volume\") pod \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\" (UID: \"a3eb6865-e0c7-46b7-8eb0-b12656040c1a\") " Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.803114 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.807669 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a3eb6865-e0c7-46b7-8eb0-b12656040c1a" (UID: "a3eb6865-e0c7-46b7-8eb0-b12656040c1a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.807859 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-kube-api-access-578bq" (OuterVolumeSpecName: "kube-api-access-578bq") pod "a3eb6865-e0c7-46b7-8eb0-b12656040c1a" (UID: "a3eb6865-e0c7-46b7-8eb0-b12656040c1a"). InnerVolumeSpecName "kube-api-access-578bq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.905891 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-578bq\" (UniqueName: \"kubernetes.io/projected/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-kube-api-access-578bq\") on node \"crc\" DevicePath \"\"" Sep 30 01:30:03 crc kubenswrapper[4809]: I0930 01:30:03.905935 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a3eb6865-e0c7-46b7-8eb0-b12656040c1a-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:30:04 crc kubenswrapper[4809]: I0930 01:30:04.196044 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" event={"ID":"a3eb6865-e0c7-46b7-8eb0-b12656040c1a","Type":"ContainerDied","Data":"3d74670109b6736b9ad98ddb1e6659afe865e1fb16048592606df69f929a4873"} Sep 30 01:30:04 crc kubenswrapper[4809]: I0930 01:30:04.196094 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf" Sep 30 01:30:04 crc kubenswrapper[4809]: I0930 01:30:04.196101 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d74670109b6736b9ad98ddb1e6659afe865e1fb16048592606df69f929a4873" Sep 30 01:30:04 crc kubenswrapper[4809]: I0930 01:30:04.754160 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8"] Sep 30 01:30:04 crc kubenswrapper[4809]: I0930 01:30:04.769965 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-ff7p8"] Sep 30 01:30:05 crc kubenswrapper[4809]: I0930 01:30:05.703992 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09d2f33b-d671-4c21-b1bc-babe71939120" path="/var/lib/kubelet/pods/09d2f33b-d671-4c21-b1bc-babe71939120/volumes" Sep 30 01:30:18 crc kubenswrapper[4809]: I0930 01:30:18.713496 4809 scope.go:117] "RemoveContainer" containerID="efb7bd62fca5b0e3f5f87719df74eb0b693b3bdd9decaae163b0fe3baba5e94a" Sep 30 01:31:25 crc kubenswrapper[4809]: I0930 01:31:25.325174 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:31:25 crc kubenswrapper[4809]: I0930 01:31:25.325923 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.170729 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-md9fs"] Sep 30 01:31:55 crc kubenswrapper[4809]: E0930 01:31:55.172433 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3eb6865-e0c7-46b7-8eb0-b12656040c1a" containerName="collect-profiles" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.172515 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3eb6865-e0c7-46b7-8eb0-b12656040c1a" containerName="collect-profiles" Sep 30 01:31:55 crc kubenswrapper[4809]: 
I0930 01:31:55.172845 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3eb6865-e0c7-46b7-8eb0-b12656040c1a" containerName="collect-profiles" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.174779 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.190186 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-md9fs"] Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.267626 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvrg8\" (UniqueName: \"kubernetes.io/projected/0db36c53-4e80-4ad1-b584-756fd20c96e9-kube-api-access-hvrg8\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.267782 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-utilities\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.268238 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-catalog-content\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.324910 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.324984 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.370424 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvrg8\" (UniqueName: \"kubernetes.io/projected/0db36c53-4e80-4ad1-b584-756fd20c96e9-kube-api-access-hvrg8\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.370538 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-utilities\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.370738 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-catalog-content\") pod 
\"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.371113 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-utilities\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.371483 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-catalog-content\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:55 crc kubenswrapper[4809]: I0930 01:31:55.903321 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvrg8\" (UniqueName: \"kubernetes.io/projected/0db36c53-4e80-4ad1-b584-756fd20c96e9-kube-api-access-hvrg8\") pod \"redhat-operators-md9fs\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:56 crc kubenswrapper[4809]: I0930 01:31:56.097058 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:31:56 crc kubenswrapper[4809]: I0930 01:31:56.664026 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-md9fs"] Sep 30 01:31:56 crc kubenswrapper[4809]: I0930 01:31:56.720059 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerStarted","Data":"267062dd48814fe7692cf486864e622f6850760066bf1e1c888eb7c9eca2bcca"} Sep 30 01:31:57 crc kubenswrapper[4809]: I0930 01:31:57.750116 4809 generic.go:334] "Generic (PLEG): container finished" podID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerID="4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5" exitCode=0 Sep 30 01:31:57 crc kubenswrapper[4809]: I0930 01:31:57.750468 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerDied","Data":"4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5"} Sep 30 01:31:57 crc kubenswrapper[4809]: I0930 01:31:57.754890 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:31:58 crc kubenswrapper[4809]: I0930 01:31:58.760352 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerStarted","Data":"68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262"} Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.551791 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2jtz6"] Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.556619 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.562892 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2jtz6"] Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.705757 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-catalog-content\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.705846 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8h26\" (UniqueName: \"kubernetes.io/projected/2c380665-1c38-43d6-a0c1-ed2e639e9225-kube-api-access-f8h26\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.706268 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-utilities\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.808260 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-catalog-content\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.809342 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8h26\" (UniqueName: \"kubernetes.io/projected/2c380665-1c38-43d6-a0c1-ed2e639e9225-kube-api-access-f8h26\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.809418 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-catalog-content\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.809629 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-utilities\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.810064 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-utilities\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.836853 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-f8h26\" (UniqueName: \"kubernetes.io/projected/2c380665-1c38-43d6-a0c1-ed2e639e9225-kube-api-access-f8h26\") pod \"redhat-marketplace-2jtz6\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:00 crc kubenswrapper[4809]: I0930 01:32:00.895955 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:01 crc kubenswrapper[4809]: I0930 01:32:01.443731 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2jtz6"] Sep 30 01:32:01 crc kubenswrapper[4809]: I0930 01:32:01.791561 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerStarted","Data":"d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302"} Sep 30 01:32:01 crc kubenswrapper[4809]: I0930 01:32:01.791952 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerStarted","Data":"399186eac5a5966f1e8f3db139da390ae620fb53608468011c620b574b1d6962"} Sep 30 01:32:02 crc kubenswrapper[4809]: I0930 01:32:02.805526 4809 generic.go:334] "Generic (PLEG): container finished" podID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerID="68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262" exitCode=0 Sep 30 01:32:02 crc kubenswrapper[4809]: I0930 01:32:02.805679 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerDied","Data":"68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262"} Sep 30 01:32:02 crc kubenswrapper[4809]: I0930 01:32:02.808432 4809 generic.go:334] "Generic (PLEG): container finished" podID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerID="d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302" exitCode=0 Sep 30 01:32:02 crc kubenswrapper[4809]: I0930 01:32:02.808458 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerDied","Data":"d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302"} Sep 30 01:32:03 crc kubenswrapper[4809]: I0930 01:32:03.818882 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerStarted","Data":"c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be"} Sep 30 01:32:03 crc kubenswrapper[4809]: I0930 01:32:03.821792 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerStarted","Data":"90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b"} Sep 30 01:32:03 crc kubenswrapper[4809]: I0930 01:32:03.860899 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-md9fs" podStartSLOduration=3.3038075080000002 podStartE2EDuration="8.860879853s" podCreationTimestamp="2025-09-30 01:31:55 +0000 UTC" firstStartedPulling="2025-09-30 01:31:57.754469829 +0000 UTC m=+4968.790719247" lastFinishedPulling="2025-09-30 01:32:03.311542184 +0000 UTC 
m=+4974.347791592" observedRunningTime="2025-09-30 01:32:03.857299414 +0000 UTC m=+4974.893548842" watchObservedRunningTime="2025-09-30 01:32:03.860879853 +0000 UTC m=+4974.897129261" Sep 30 01:32:04 crc kubenswrapper[4809]: I0930 01:32:04.838557 4809 generic.go:334] "Generic (PLEG): container finished" podID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerID="c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be" exitCode=0 Sep 30 01:32:04 crc kubenswrapper[4809]: I0930 01:32:04.838866 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerDied","Data":"c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be"} Sep 30 01:32:05 crc kubenswrapper[4809]: I0930 01:32:05.852579 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerStarted","Data":"651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0"} Sep 30 01:32:05 crc kubenswrapper[4809]: I0930 01:32:05.884575 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2jtz6" podStartSLOduration=3.505228052 podStartE2EDuration="5.8845588s" podCreationTimestamp="2025-09-30 01:32:00 +0000 UTC" firstStartedPulling="2025-09-30 01:32:02.81141711 +0000 UTC m=+4973.847666528" lastFinishedPulling="2025-09-30 01:32:05.190747828 +0000 UTC m=+4976.226997276" observedRunningTime="2025-09-30 01:32:05.882680548 +0000 UTC m=+4976.918929966" watchObservedRunningTime="2025-09-30 01:32:05.8845588 +0000 UTC m=+4976.920808198" Sep 30 01:32:06 crc kubenswrapper[4809]: I0930 01:32:06.097269 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:32:06 crc kubenswrapper[4809]: I0930 01:32:06.097702 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:32:07 crc kubenswrapper[4809]: I0930 01:32:07.154338 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-md9fs" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="registry-server" probeResult="failure" output=< Sep 30 01:32:07 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:32:07 crc kubenswrapper[4809]: > Sep 30 01:32:10 crc kubenswrapper[4809]: I0930 01:32:10.896715 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:10 crc kubenswrapper[4809]: I0930 01:32:10.897269 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:10 crc kubenswrapper[4809]: I0930 01:32:10.995896 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:11 crc kubenswrapper[4809]: I0930 01:32:11.985873 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:12 crc kubenswrapper[4809]: I0930 01:32:12.050332 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2jtz6"] Sep 30 01:32:13 crc kubenswrapper[4809]: I0930 01:32:13.943578 4809 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-marketplace/redhat-marketplace-2jtz6" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="registry-server" containerID="cri-o://651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0" gracePeriod=2 Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.557101 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.678042 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-utilities\") pod \"2c380665-1c38-43d6-a0c1-ed2e639e9225\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.678133 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8h26\" (UniqueName: \"kubernetes.io/projected/2c380665-1c38-43d6-a0c1-ed2e639e9225-kube-api-access-f8h26\") pod \"2c380665-1c38-43d6-a0c1-ed2e639e9225\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.678200 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-catalog-content\") pod \"2c380665-1c38-43d6-a0c1-ed2e639e9225\" (UID: \"2c380665-1c38-43d6-a0c1-ed2e639e9225\") " Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.678930 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-utilities" (OuterVolumeSpecName: "utilities") pod "2c380665-1c38-43d6-a0c1-ed2e639e9225" (UID: "2c380665-1c38-43d6-a0c1-ed2e639e9225"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.679183 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.684120 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c380665-1c38-43d6-a0c1-ed2e639e9225-kube-api-access-f8h26" (OuterVolumeSpecName: "kube-api-access-f8h26") pod "2c380665-1c38-43d6-a0c1-ed2e639e9225" (UID: "2c380665-1c38-43d6-a0c1-ed2e639e9225"). InnerVolumeSpecName "kube-api-access-f8h26". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.692551 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c380665-1c38-43d6-a0c1-ed2e639e9225" (UID: "2c380665-1c38-43d6-a0c1-ed2e639e9225"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.783264 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8h26\" (UniqueName: \"kubernetes.io/projected/2c380665-1c38-43d6-a0c1-ed2e639e9225-kube-api-access-f8h26\") on node \"crc\" DevicePath \"\"" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.783505 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c380665-1c38-43d6-a0c1-ed2e639e9225-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.956387 4809 generic.go:334] "Generic (PLEG): container finished" podID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerID="651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0" exitCode=0 Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.956440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerDied","Data":"651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0"} Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.956509 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2jtz6" event={"ID":"2c380665-1c38-43d6-a0c1-ed2e639e9225","Type":"ContainerDied","Data":"399186eac5a5966f1e8f3db139da390ae620fb53608468011c620b574b1d6962"} Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.956533 4809 scope.go:117] "RemoveContainer" containerID="651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.956463 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2jtz6" Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.990729 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2jtz6"] Sep 30 01:32:14 crc kubenswrapper[4809]: I0930 01:32:14.993689 4809 scope.go:117] "RemoveContainer" containerID="c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.000339 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2jtz6"] Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.018602 4809 scope.go:117] "RemoveContainer" containerID="d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.092146 4809 scope.go:117] "RemoveContainer" containerID="651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0" Sep 30 01:32:15 crc kubenswrapper[4809]: E0930 01:32:15.092542 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0\": container with ID starting with 651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0 not found: ID does not exist" containerID="651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.092572 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0"} err="failed to get container status \"651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0\": rpc error: code = NotFound desc = could not find container \"651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0\": container with ID starting with 651ff8126f459cfb642293c5d6d8b6f941704eda078ff43cd7888c4224b095a0 not found: ID does not exist" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.092592 4809 scope.go:117] "RemoveContainer" containerID="c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be" Sep 30 01:32:15 crc kubenswrapper[4809]: E0930 01:32:15.092904 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be\": container with ID starting with c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be not found: ID does not exist" containerID="c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.092922 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be"} err="failed to get container status \"c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be\": rpc error: code = NotFound desc = could not find container \"c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be\": container with ID starting with c93712a0cec4859cf7f85f553bee4cfecb74b0237b5682866b3d99024f1963be not found: ID does not exist" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.092935 4809 scope.go:117] "RemoveContainer" containerID="d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302" Sep 30 01:32:15 crc kubenswrapper[4809]: E0930 01:32:15.093127 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302\": container with ID starting with d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302 not found: ID does not exist" containerID="d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.093176 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302"} err="failed to get container status \"d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302\": rpc error: code = NotFound desc = could not find container \"d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302\": container with ID starting with d027f8d10c8b6518c70321830a2b7ec1ee592471d183c9ecdf097a29f98a4302 not found: ID does not exist" Sep 30 01:32:15 crc kubenswrapper[4809]: I0930 01:32:15.707025 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" path="/var/lib/kubelet/pods/2c380665-1c38-43d6-a0c1-ed2e639e9225/volumes" Sep 30 01:32:16 crc kubenswrapper[4809]: I0930 01:32:16.158454 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:32:16 crc kubenswrapper[4809]: I0930 01:32:16.231068 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:32:16 crc kubenswrapper[4809]: I0930 01:32:16.790989 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-md9fs"] Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.002982 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-md9fs" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="registry-server" containerID="cri-o://90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b" gracePeriod=2 Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.642089 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.790925 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-catalog-content\") pod \"0db36c53-4e80-4ad1-b584-756fd20c96e9\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.791086 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-utilities\") pod \"0db36c53-4e80-4ad1-b584-756fd20c96e9\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.791296 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvrg8\" (UniqueName: \"kubernetes.io/projected/0db36c53-4e80-4ad1-b584-756fd20c96e9-kube-api-access-hvrg8\") pod \"0db36c53-4e80-4ad1-b584-756fd20c96e9\" (UID: \"0db36c53-4e80-4ad1-b584-756fd20c96e9\") " Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.791954 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-utilities" (OuterVolumeSpecName: "utilities") pod "0db36c53-4e80-4ad1-b584-756fd20c96e9" (UID: "0db36c53-4e80-4ad1-b584-756fd20c96e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.802859 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0db36c53-4e80-4ad1-b584-756fd20c96e9-kube-api-access-hvrg8" (OuterVolumeSpecName: "kube-api-access-hvrg8") pod "0db36c53-4e80-4ad1-b584-756fd20c96e9" (UID: "0db36c53-4e80-4ad1-b584-756fd20c96e9"). InnerVolumeSpecName "kube-api-access-hvrg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.893748 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvrg8\" (UniqueName: \"kubernetes.io/projected/0db36c53-4e80-4ad1-b584-756fd20c96e9-kube-api-access-hvrg8\") on node \"crc\" DevicePath \"\"" Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.893776 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.899787 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0db36c53-4e80-4ad1-b584-756fd20c96e9" (UID: "0db36c53-4e80-4ad1-b584-756fd20c96e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:32:18 crc kubenswrapper[4809]: I0930 01:32:18.995776 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0db36c53-4e80-4ad1-b584-756fd20c96e9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.014881 4809 generic.go:334] "Generic (PLEG): container finished" podID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerID="90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b" exitCode=0 Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.014923 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerDied","Data":"90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b"} Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.014934 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-md9fs" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.014957 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md9fs" event={"ID":"0db36c53-4e80-4ad1-b584-756fd20c96e9","Type":"ContainerDied","Data":"267062dd48814fe7692cf486864e622f6850760066bf1e1c888eb7c9eca2bcca"} Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.014976 4809 scope.go:117] "RemoveContainer" containerID="90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.057547 4809 scope.go:117] "RemoveContainer" containerID="68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.059175 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-md9fs"] Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.070548 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-md9fs"] Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.096472 4809 scope.go:117] "RemoveContainer" containerID="4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.132875 4809 scope.go:117] "RemoveContainer" containerID="90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b" Sep 30 01:32:19 crc kubenswrapper[4809]: E0930 01:32:19.133558 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b\": container with ID starting with 90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b not found: ID does not exist" containerID="90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.133600 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b"} err="failed to get container status \"90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b\": rpc error: code = NotFound desc = could not find container \"90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b\": container with ID starting with 90cb8a086be5e171e742fda8a1db9f56d2569f2ce233159100610fb2e609fc3b not found: ID does not exist" Sep 30 01:32:19 crc 
kubenswrapper[4809]: I0930 01:32:19.133627 4809 scope.go:117] "RemoveContainer" containerID="68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262" Sep 30 01:32:19 crc kubenswrapper[4809]: E0930 01:32:19.134200 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262\": container with ID starting with 68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262 not found: ID does not exist" containerID="68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.134244 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262"} err="failed to get container status \"68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262\": rpc error: code = NotFound desc = could not find container \"68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262\": container with ID starting with 68369d658aae9f6cd9315505ae5ac6681766046da5504d28f14539b7b8eca262 not found: ID does not exist" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.134257 4809 scope.go:117] "RemoveContainer" containerID="4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5" Sep 30 01:32:19 crc kubenswrapper[4809]: E0930 01:32:19.134543 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5\": container with ID starting with 4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5 not found: ID does not exist" containerID="4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.134582 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5"} err="failed to get container status \"4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5\": rpc error: code = NotFound desc = could not find container \"4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5\": container with ID starting with 4a09a47228fc0ae50066f73eb44f1e58cb8e01fca8640b3df71a92a77db7adf5 not found: ID does not exist" Sep 30 01:32:19 crc kubenswrapper[4809]: E0930 01:32:19.273500 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0db36c53_4e80_4ad1_b584_756fd20c96e9.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0db36c53_4e80_4ad1_b584_756fd20c96e9.slice/crio-267062dd48814fe7692cf486864e622f6850760066bf1e1c888eb7c9eca2bcca\": RecentStats: unable to find data in memory cache]" Sep 30 01:32:19 crc kubenswrapper[4809]: I0930 01:32:19.710009 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" path="/var/lib/kubelet/pods/0db36c53-4e80-4ad1-b584-756fd20c96e9/volumes" Sep 30 01:32:25 crc kubenswrapper[4809]: I0930 01:32:25.324823 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:32:25 crc kubenswrapper[4809]: I0930 01:32:25.325508 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:32:25 crc kubenswrapper[4809]: I0930 01:32:25.325580 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:32:25 crc kubenswrapper[4809]: I0930 01:32:25.326826 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:32:25 crc kubenswrapper[4809]: I0930 01:32:25.326930 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" gracePeriod=600 Sep 30 01:32:25 crc kubenswrapper[4809]: E0930 01:32:25.465326 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:32:26 crc kubenswrapper[4809]: I0930 01:32:26.113014 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" exitCode=0 Sep 30 01:32:26 crc kubenswrapper[4809]: I0930 01:32:26.113089 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8"} Sep 30 01:32:26 crc kubenswrapper[4809]: I0930 01:32:26.113142 4809 scope.go:117] "RemoveContainer" containerID="19a1442afdfa18c0d27f18c243884121c78e39e1fdc5c50afe899f2c22a5eea6" Sep 30 01:32:26 crc kubenswrapper[4809]: I0930 01:32:26.114271 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:32:26 crc kubenswrapper[4809]: E0930 01:32:26.114911 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:32:38 crc kubenswrapper[4809]: I0930 01:32:38.691832 4809 scope.go:117] 
"RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:32:38 crc kubenswrapper[4809]: E0930 01:32:38.692812 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:32:49 crc kubenswrapper[4809]: I0930 01:32:49.703197 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:32:49 crc kubenswrapper[4809]: E0930 01:32:49.704245 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:33:02 crc kubenswrapper[4809]: I0930 01:33:02.691463 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:33:02 crc kubenswrapper[4809]: E0930 01:33:02.692385 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:33:14 crc kubenswrapper[4809]: I0930 01:33:14.692023 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:33:14 crc kubenswrapper[4809]: E0930 01:33:14.693013 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:33:29 crc kubenswrapper[4809]: I0930 01:33:29.708490 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:33:29 crc kubenswrapper[4809]: E0930 01:33:29.709783 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:33:42 crc kubenswrapper[4809]: I0930 01:33:42.691870 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:33:42 crc kubenswrapper[4809]: E0930 01:33:42.693147 4809 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:33:54 crc kubenswrapper[4809]: I0930 01:33:54.691042 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:33:54 crc kubenswrapper[4809]: E0930 01:33:54.691736 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:34:08 crc kubenswrapper[4809]: I0930 01:34:08.692213 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:34:08 crc kubenswrapper[4809]: E0930 01:34:08.692933 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:34:21 crc kubenswrapper[4809]: I0930 01:34:21.691197 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:34:21 crc kubenswrapper[4809]: E0930 01:34:21.692013 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:34:32 crc kubenswrapper[4809]: I0930 01:34:32.691339 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:34:32 crc kubenswrapper[4809]: E0930 01:34:32.692165 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:34:46 crc kubenswrapper[4809]: I0930 01:34:46.691954 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:34:46 crc kubenswrapper[4809]: E0930 01:34:46.693194 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:34:58 crc kubenswrapper[4809]: I0930 01:34:58.691546 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:34:58 crc kubenswrapper[4809]: E0930 01:34:58.692286 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:35:11 crc kubenswrapper[4809]: I0930 01:35:11.691560 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:35:11 crc kubenswrapper[4809]: E0930 01:35:11.692647 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:35:23 crc kubenswrapper[4809]: I0930 01:35:23.695275 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:35:23 crc kubenswrapper[4809]: E0930 01:35:23.696046 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:35:34 crc kubenswrapper[4809]: I0930 01:35:34.692681 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:35:34 crc kubenswrapper[4809]: E0930 01:35:34.693809 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:35:48 crc kubenswrapper[4809]: I0930 01:35:48.691457 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:35:48 crc kubenswrapper[4809]: E0930 01:35:48.694511 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:36:02 crc kubenswrapper[4809]: I0930 01:36:02.691734 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:36:02 crc kubenswrapper[4809]: E0930 01:36:02.692877 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:36:17 crc kubenswrapper[4809]: I0930 01:36:17.691232 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:36:17 crc kubenswrapper[4809]: E0930 01:36:17.692336 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:36:30 crc kubenswrapper[4809]: I0930 01:36:30.690856 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:36:30 crc kubenswrapper[4809]: E0930 01:36:30.691767 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:36:44 crc kubenswrapper[4809]: I0930 01:36:44.691684 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:36:44 crc kubenswrapper[4809]: E0930 01:36:44.692799 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:36:47 crc kubenswrapper[4809]: I0930 01:36:47.077103 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-4hp49"] Sep 30 01:36:47 crc kubenswrapper[4809]: I0930 01:36:47.089681 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-4hp49"] Sep 30 01:36:47 crc kubenswrapper[4809]: I0930 01:36:47.737507 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf" path="/var/lib/kubelet/pods/b2ecaf9e-925e-466a-9d32-2b5bd7ff7bdf/volumes" Sep 30 01:36:58 crc kubenswrapper[4809]: I0930 01:36:58.691567 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:36:58 crc kubenswrapper[4809]: E0930 01:36:58.693124 4809 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:37:06 crc kubenswrapper[4809]: I0930 01:37:06.044463 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-9476-account-create-6ww2r"] Sep 30 01:37:06 crc kubenswrapper[4809]: I0930 01:37:06.054624 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-9476-account-create-6ww2r"] Sep 30 01:37:07 crc kubenswrapper[4809]: I0930 01:37:07.716924 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d79361f6-f824-40ff-afd1-ab2ca1ff5cb2" path="/var/lib/kubelet/pods/d79361f6-f824-40ff-afd1-ab2ca1ff5cb2/volumes" Sep 30 01:37:09 crc kubenswrapper[4809]: I0930 01:37:09.713027 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:37:09 crc kubenswrapper[4809]: E0930 01:37:09.713770 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:37:19 crc kubenswrapper[4809]: I0930 01:37:19.000000 4809 scope.go:117] "RemoveContainer" containerID="ffe7b26062433c8ce583c1f85c2d7f5ff370ae7d6cfb7bba1f14fcc9818f9ae1" Sep 30 01:37:19 crc kubenswrapper[4809]: I0930 01:37:19.053788 4809 scope.go:117] "RemoveContainer" containerID="1b787b78deaa0276ea8e36737f24a128810e2a74ed35cb6bf74305e0964767ee" Sep 30 01:37:24 crc kubenswrapper[4809]: I0930 01:37:24.692010 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:37:24 crc kubenswrapper[4809]: E0930 01:37:24.693421 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:37:26 crc kubenswrapper[4809]: I0930 01:37:26.035991 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-6ct68"] Sep 30 01:37:26 crc kubenswrapper[4809]: I0930 01:37:26.049278 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-6ct68"] Sep 30 01:37:27 crc kubenswrapper[4809]: I0930 01:37:27.703452 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc535060-3a0f-47f2-b7d7-1bbb28a367dc" path="/var/lib/kubelet/pods/dc535060-3a0f-47f2-b7d7-1bbb28a367dc/volumes" Sep 30 01:37:36 crc kubenswrapper[4809]: I0930 01:37:36.693408 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:37:37 crc kubenswrapper[4809]: I0930 01:37:37.994191 4809 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"89d4117d6494dc08f9753a0441ed5cdeca77976220539ebf1a8212fb6dcd7902"} Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.974997 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vlp8h"] Sep 30 01:38:13 crc kubenswrapper[4809]: E0930 01:38:13.976120 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="registry-server" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976137 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="registry-server" Sep 30 01:38:13 crc kubenswrapper[4809]: E0930 01:38:13.976164 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="registry-server" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976173 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="registry-server" Sep 30 01:38:13 crc kubenswrapper[4809]: E0930 01:38:13.976188 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="extract-content" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976195 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="extract-content" Sep 30 01:38:13 crc kubenswrapper[4809]: E0930 01:38:13.976218 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="extract-utilities" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976226 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="extract-utilities" Sep 30 01:38:13 crc kubenswrapper[4809]: E0930 01:38:13.976239 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="extract-utilities" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976246 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="extract-utilities" Sep 30 01:38:13 crc kubenswrapper[4809]: E0930 01:38:13.976268 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="extract-content" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976275 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="extract-content" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976491 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="0db36c53-4e80-4ad1-b584-756fd20c96e9" containerName="registry-server" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.976505 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c380665-1c38-43d6-a0c1-ed2e639e9225" containerName="registry-server" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.978556 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:13 crc kubenswrapper[4809]: I0930 01:38:13.998497 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vlp8h"] Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.172740 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qdsm\" (UniqueName: \"kubernetes.io/projected/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-kube-api-access-8qdsm\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.172819 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-utilities\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.172848 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-catalog-content\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.274743 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-catalog-content\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.274994 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qdsm\" (UniqueName: \"kubernetes.io/projected/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-kube-api-access-8qdsm\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.275081 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-utilities\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.275321 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-catalog-content\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.275414 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-utilities\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.294304 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8qdsm\" (UniqueName: \"kubernetes.io/projected/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-kube-api-access-8qdsm\") pod \"certified-operators-vlp8h\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.302141 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:14 crc kubenswrapper[4809]: I0930 01:38:14.987228 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vlp8h"] Sep 30 01:38:15 crc kubenswrapper[4809]: I0930 01:38:15.426553 4809 generic.go:334] "Generic (PLEG): container finished" podID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerID="4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651" exitCode=0 Sep 30 01:38:15 crc kubenswrapper[4809]: I0930 01:38:15.426613 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerDied","Data":"4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651"} Sep 30 01:38:15 crc kubenswrapper[4809]: I0930 01:38:15.426975 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerStarted","Data":"467fbfc212b05dac315de6ae6f18c3985c2bf514686abeab51922d851073cd40"} Sep 30 01:38:15 crc kubenswrapper[4809]: I0930 01:38:15.429174 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:38:16 crc kubenswrapper[4809]: I0930 01:38:16.440562 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerStarted","Data":"efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112"} Sep 30 01:38:18 crc kubenswrapper[4809]: I0930 01:38:18.461358 4809 generic.go:334] "Generic (PLEG): container finished" podID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerID="efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112" exitCode=0 Sep 30 01:38:18 crc kubenswrapper[4809]: I0930 01:38:18.461440 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerDied","Data":"efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112"} Sep 30 01:38:19 crc kubenswrapper[4809]: I0930 01:38:19.180467 4809 scope.go:117] "RemoveContainer" containerID="89e3ddaedb672590f4c9e97301af5d8748e4ea33d2a2a7d43353ccb61522f12e" Sep 30 01:38:19 crc kubenswrapper[4809]: I0930 01:38:19.475692 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerStarted","Data":"a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6"} Sep 30 01:38:19 crc kubenswrapper[4809]: I0930 01:38:19.501966 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vlp8h" podStartSLOduration=3.031550057 podStartE2EDuration="6.501949614s" podCreationTimestamp="2025-09-30 01:38:13 +0000 UTC" firstStartedPulling="2025-09-30 01:38:15.428933344 +0000 UTC m=+5346.465182752" 
lastFinishedPulling="2025-09-30 01:38:18.899332901 +0000 UTC m=+5349.935582309" observedRunningTime="2025-09-30 01:38:19.49857598 +0000 UTC m=+5350.534825388" watchObservedRunningTime="2025-09-30 01:38:19.501949614 +0000 UTC m=+5350.538199022" Sep 30 01:38:24 crc kubenswrapper[4809]: I0930 01:38:24.302540 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:24 crc kubenswrapper[4809]: I0930 01:38:24.303072 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:24 crc kubenswrapper[4809]: I0930 01:38:24.364411 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:24 crc kubenswrapper[4809]: I0930 01:38:24.575506 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:24 crc kubenswrapper[4809]: I0930 01:38:24.626547 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vlp8h"] Sep 30 01:38:26 crc kubenswrapper[4809]: I0930 01:38:26.550418 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vlp8h" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="registry-server" containerID="cri-o://a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6" gracePeriod=2 Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.154325 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.316825 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-utilities\") pod \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.316958 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-catalog-content\") pod \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.317092 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qdsm\" (UniqueName: \"kubernetes.io/projected/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-kube-api-access-8qdsm\") pod \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\" (UID: \"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2\") " Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.317942 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-utilities" (OuterVolumeSpecName: "utilities") pod "1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" (UID: "1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.318483 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.322868 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-kube-api-access-8qdsm" (OuterVolumeSpecName: "kube-api-access-8qdsm") pod "1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" (UID: "1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2"). InnerVolumeSpecName "kube-api-access-8qdsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.354466 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" (UID: "1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.420524 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.420563 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qdsm\" (UniqueName: \"kubernetes.io/projected/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2-kube-api-access-8qdsm\") on node \"crc\" DevicePath \"\"" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.562963 4809 generic.go:334] "Generic (PLEG): container finished" podID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerID="a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6" exitCode=0 Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.563021 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerDied","Data":"a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6"} Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.563114 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vlp8h" event={"ID":"1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2","Type":"ContainerDied","Data":"467fbfc212b05dac315de6ae6f18c3985c2bf514686abeab51922d851073cd40"} Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.563054 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vlp8h" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.563168 4809 scope.go:117] "RemoveContainer" containerID="a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.603134 4809 scope.go:117] "RemoveContainer" containerID="efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.605016 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vlp8h"] Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.615986 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vlp8h"] Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.635052 4809 scope.go:117] "RemoveContainer" containerID="4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.708231 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" path="/var/lib/kubelet/pods/1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2/volumes" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.709149 4809 scope.go:117] "RemoveContainer" containerID="a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6" Sep 30 01:38:27 crc kubenswrapper[4809]: E0930 01:38:27.709777 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6\": container with ID starting with a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6 not found: ID does not exist" containerID="a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.709829 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6"} err="failed to get container status \"a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6\": rpc error: code = NotFound desc = could not find container \"a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6\": container with ID starting with a0e987c02e3d1df8f6cfa40133630697f0f745bfd4d0b0099f996af45582f0d6 not found: ID does not exist" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.709869 4809 scope.go:117] "RemoveContainer" containerID="efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112" Sep 30 01:38:27 crc kubenswrapper[4809]: E0930 01:38:27.710293 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112\": container with ID starting with efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112 not found: ID does not exist" containerID="efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.710328 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112"} err="failed to get container status \"efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112\": rpc error: code = NotFound desc = could not find container 
\"efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112\": container with ID starting with efa21cb5f4346af16c03db37ed080b0cf007254e815b9b5c0013fbd5c56ef112 not found: ID does not exist" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.710350 4809 scope.go:117] "RemoveContainer" containerID="4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651" Sep 30 01:38:27 crc kubenswrapper[4809]: E0930 01:38:27.710701 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651\": container with ID starting with 4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651 not found: ID does not exist" containerID="4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651" Sep 30 01:38:27 crc kubenswrapper[4809]: I0930 01:38:27.710720 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651"} err="failed to get container status \"4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651\": rpc error: code = NotFound desc = could not find container \"4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651\": container with ID starting with 4d41a08146b5587b4e359ae5ab45254964d88e7ff3c799d67233ce4235b9f651 not found: ID does not exist" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.874963 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tqqpr"] Sep 30 01:39:06 crc kubenswrapper[4809]: E0930 01:39:06.875981 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="extract-content" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.875994 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="extract-content" Sep 30 01:39:06 crc kubenswrapper[4809]: E0930 01:39:06.876013 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="extract-utilities" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.876019 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="extract-utilities" Sep 30 01:39:06 crc kubenswrapper[4809]: E0930 01:39:06.876043 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="registry-server" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.876048 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="registry-server" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.876285 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4c88e4-34c2-49c4-abb2-4767aa4ff3a2" containerName="registry-server" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.877925 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.888589 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tqqpr"] Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.924239 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-utilities\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.924355 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmp88\" (UniqueName: \"kubernetes.io/projected/acad7875-c58d-4175-acc1-4b29f1282ba0-kube-api-access-mmp88\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:06 crc kubenswrapper[4809]: I0930 01:39:06.924400 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-catalog-content\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.026307 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-utilities\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.026385 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmp88\" (UniqueName: \"kubernetes.io/projected/acad7875-c58d-4175-acc1-4b29f1282ba0-kube-api-access-mmp88\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.026436 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-catalog-content\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.026988 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-catalog-content\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.027017 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-utilities\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.051552 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mmp88\" (UniqueName: \"kubernetes.io/projected/acad7875-c58d-4175-acc1-4b29f1282ba0-kube-api-access-mmp88\") pod \"community-operators-tqqpr\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.207964 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:07 crc kubenswrapper[4809]: I0930 01:39:07.789806 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tqqpr"] Sep 30 01:39:08 crc kubenswrapper[4809]: I0930 01:39:08.034412 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerStarted","Data":"3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f"} Sep 30 01:39:08 crc kubenswrapper[4809]: I0930 01:39:08.034459 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerStarted","Data":"19d29ef3f6cc7db524c51b94251c97bee35304a540c95a459ce65c28485f3b4c"} Sep 30 01:39:09 crc kubenswrapper[4809]: I0930 01:39:09.049391 4809 generic.go:334] "Generic (PLEG): container finished" podID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerID="3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f" exitCode=0 Sep 30 01:39:09 crc kubenswrapper[4809]: I0930 01:39:09.049509 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerDied","Data":"3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f"} Sep 30 01:39:11 crc kubenswrapper[4809]: I0930 01:39:11.070714 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerStarted","Data":"99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c"} Sep 30 01:39:12 crc kubenswrapper[4809]: I0930 01:39:12.092563 4809 generic.go:334] "Generic (PLEG): container finished" podID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerID="99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c" exitCode=0 Sep 30 01:39:12 crc kubenswrapper[4809]: I0930 01:39:12.093335 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerDied","Data":"99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c"} Sep 30 01:39:13 crc kubenswrapper[4809]: I0930 01:39:13.111634 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerStarted","Data":"9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc"} Sep 30 01:39:13 crc kubenswrapper[4809]: I0930 01:39:13.140546 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tqqpr" podStartSLOduration=3.651648935 podStartE2EDuration="7.140530124s" podCreationTimestamp="2025-09-30 01:39:06 +0000 UTC" firstStartedPulling="2025-09-30 01:39:09.05217745 +0000 UTC m=+5400.088426868" lastFinishedPulling="2025-09-30 
01:39:12.541058649 +0000 UTC m=+5403.577308057" observedRunningTime="2025-09-30 01:39:13.137614224 +0000 UTC m=+5404.173863632" watchObservedRunningTime="2025-09-30 01:39:13.140530124 +0000 UTC m=+5404.176779532" Sep 30 01:39:17 crc kubenswrapper[4809]: I0930 01:39:17.209010 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:17 crc kubenswrapper[4809]: I0930 01:39:17.209562 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:18 crc kubenswrapper[4809]: I0930 01:39:18.276608 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-tqqpr" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="registry-server" probeResult="failure" output=< Sep 30 01:39:18 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:39:18 crc kubenswrapper[4809]: > Sep 30 01:39:27 crc kubenswrapper[4809]: I0930 01:39:27.274569 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:27 crc kubenswrapper[4809]: I0930 01:39:27.329701 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:27 crc kubenswrapper[4809]: I0930 01:39:27.516885 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tqqpr"] Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.291692 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tqqpr" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="registry-server" containerID="cri-o://9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc" gracePeriod=2 Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.850090 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.962264 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-catalog-content\") pod \"acad7875-c58d-4175-acc1-4b29f1282ba0\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.962328 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-utilities\") pod \"acad7875-c58d-4175-acc1-4b29f1282ba0\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.962417 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmp88\" (UniqueName: \"kubernetes.io/projected/acad7875-c58d-4175-acc1-4b29f1282ba0-kube-api-access-mmp88\") pod \"acad7875-c58d-4175-acc1-4b29f1282ba0\" (UID: \"acad7875-c58d-4175-acc1-4b29f1282ba0\") " Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.964018 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-utilities" (OuterVolumeSpecName: "utilities") pod "acad7875-c58d-4175-acc1-4b29f1282ba0" (UID: "acad7875-c58d-4175-acc1-4b29f1282ba0"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:39:29 crc kubenswrapper[4809]: I0930 01:39:29.969859 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acad7875-c58d-4175-acc1-4b29f1282ba0-kube-api-access-mmp88" (OuterVolumeSpecName: "kube-api-access-mmp88") pod "acad7875-c58d-4175-acc1-4b29f1282ba0" (UID: "acad7875-c58d-4175-acc1-4b29f1282ba0"). InnerVolumeSpecName "kube-api-access-mmp88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.047897 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "acad7875-c58d-4175-acc1-4b29f1282ba0" (UID: "acad7875-c58d-4175-acc1-4b29f1282ba0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.067269 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.067321 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acad7875-c58d-4175-acc1-4b29f1282ba0-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.067354 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmp88\" (UniqueName: \"kubernetes.io/projected/acad7875-c58d-4175-acc1-4b29f1282ba0-kube-api-access-mmp88\") on node \"crc\" DevicePath \"\"" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.308576 4809 generic.go:334] "Generic (PLEG): container finished" podID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerID="9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc" exitCode=0 Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.308619 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerDied","Data":"9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc"} Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.308669 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqqpr" event={"ID":"acad7875-c58d-4175-acc1-4b29f1282ba0","Type":"ContainerDied","Data":"19d29ef3f6cc7db524c51b94251c97bee35304a540c95a459ce65c28485f3b4c"} Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.308686 4809 scope.go:117] "RemoveContainer" containerID="9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.308746 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tqqpr" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.333730 4809 scope.go:117] "RemoveContainer" containerID="99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.363345 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tqqpr"] Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.375573 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tqqpr"] Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.392554 4809 scope.go:117] "RemoveContainer" containerID="3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.444379 4809 scope.go:117] "RemoveContainer" containerID="9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc" Sep 30 01:39:30 crc kubenswrapper[4809]: E0930 01:39:30.444921 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc\": container with ID starting with 9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc not found: ID does not exist" containerID="9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.444964 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc"} err="failed to get container status \"9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc\": rpc error: code = NotFound desc = could not find container \"9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc\": container with ID starting with 9047342240acd5b1c35d3708879e926011a6a549bd1d6edb92e1beaa312737bc not found: ID does not exist" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.445004 4809 scope.go:117] "RemoveContainer" containerID="99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c" Sep 30 01:39:30 crc kubenswrapper[4809]: E0930 01:39:30.445798 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c\": container with ID starting with 99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c not found: ID does not exist" containerID="99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.445826 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c"} err="failed to get container status \"99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c\": rpc error: code = NotFound desc = could not find container \"99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c\": container with ID starting with 99a2ed40483cf0189e64aa153edbdc1245d629254082761ef7286ced05a8e84c not found: ID does not exist" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.445847 4809 scope.go:117] "RemoveContainer" containerID="3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f" Sep 30 01:39:30 crc kubenswrapper[4809]: E0930 01:39:30.446202 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f\": container with ID starting with 3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f not found: ID does not exist" containerID="3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f" Sep 30 01:39:30 crc kubenswrapper[4809]: I0930 01:39:30.446249 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f"} err="failed to get container status \"3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f\": rpc error: code = NotFound desc = could not find container \"3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f\": container with ID starting with 3ea4865d924b6cfb1e2dbfb28624ae714ff04c6a3795f90ceca132a275dc355f not found: ID does not exist" Sep 30 01:39:31 crc kubenswrapper[4809]: I0930 01:39:31.705496 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" path="/var/lib/kubelet/pods/acad7875-c58d-4175-acc1-4b29f1282ba0/volumes" Sep 30 01:39:55 crc kubenswrapper[4809]: I0930 01:39:55.325659 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:39:55 crc kubenswrapper[4809]: I0930 01:39:55.326265 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:40:25 crc kubenswrapper[4809]: I0930 01:40:25.325181 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:40:25 crc kubenswrapper[4809]: I0930 01:40:25.325713 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:40:55 crc kubenswrapper[4809]: I0930 01:40:55.325118 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:40:55 crc kubenswrapper[4809]: I0930 01:40:55.325718 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:40:55 crc kubenswrapper[4809]: I0930 01:40:55.325767 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:40:55 crc kubenswrapper[4809]: I0930 01:40:55.326625 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"89d4117d6494dc08f9753a0441ed5cdeca77976220539ebf1a8212fb6dcd7902"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:40:55 crc kubenswrapper[4809]: I0930 01:40:55.326692 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://89d4117d6494dc08f9753a0441ed5cdeca77976220539ebf1a8212fb6dcd7902" gracePeriod=600 Sep 30 01:40:56 crc kubenswrapper[4809]: I0930 01:40:56.373548 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="89d4117d6494dc08f9753a0441ed5cdeca77976220539ebf1a8212fb6dcd7902" exitCode=0 Sep 30 01:40:56 crc kubenswrapper[4809]: I0930 01:40:56.373686 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"89d4117d6494dc08f9753a0441ed5cdeca77976220539ebf1a8212fb6dcd7902"} Sep 30 01:40:56 crc kubenswrapper[4809]: I0930 01:40:56.374304 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21"} Sep 30 01:40:56 crc kubenswrapper[4809]: I0930 01:40:56.374343 4809 scope.go:117] "RemoveContainer" containerID="44e63bb9e1c0bee82cc9c6f7122955635bbcda79690b057133b85244ec56c5b8" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.931919 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qqskm"] Sep 30 01:42:46 crc kubenswrapper[4809]: E0930 01:42:46.933985 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="extract-utilities" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.934089 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="extract-utilities" Sep 30 01:42:46 crc kubenswrapper[4809]: E0930 01:42:46.934173 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="registry-server" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.934250 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="registry-server" Sep 30 01:42:46 crc kubenswrapper[4809]: E0930 01:42:46.934338 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="extract-content" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.934413 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="extract-content" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.934857 4809 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="acad7875-c58d-4175-acc1-4b29f1282ba0" containerName="registry-server" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.936946 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.952511 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-catalog-content\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.952824 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-utilities\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.953082 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95z7z\" (UniqueName: \"kubernetes.io/projected/47ccb2b6-a03e-4d28-bab0-93234b6ce963-kube-api-access-95z7z\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:46 crc kubenswrapper[4809]: I0930 01:42:46.968099 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqskm"] Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.054977 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95z7z\" (UniqueName: \"kubernetes.io/projected/47ccb2b6-a03e-4d28-bab0-93234b6ce963-kube-api-access-95z7z\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.055360 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-catalog-content\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.055443 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-utilities\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.055891 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-catalog-content\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.056072 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-utilities\") pod \"redhat-marketplace-qqskm\" (UID: 
\"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.081055 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95z7z\" (UniqueName: \"kubernetes.io/projected/47ccb2b6-a03e-4d28-bab0-93234b6ce963-kube-api-access-95z7z\") pod \"redhat-marketplace-qqskm\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.268235 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:47 crc kubenswrapper[4809]: I0930 01:42:47.764582 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqskm"] Sep 30 01:42:48 crc kubenswrapper[4809]: I0930 01:42:48.756778 4809 generic.go:334] "Generic (PLEG): container finished" podID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerID="b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6" exitCode=0 Sep 30 01:42:48 crc kubenswrapper[4809]: I0930 01:42:48.756839 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqskm" event={"ID":"47ccb2b6-a03e-4d28-bab0-93234b6ce963","Type":"ContainerDied","Data":"b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6"} Sep 30 01:42:48 crc kubenswrapper[4809]: I0930 01:42:48.757117 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqskm" event={"ID":"47ccb2b6-a03e-4d28-bab0-93234b6ce963","Type":"ContainerStarted","Data":"5290df5d955bd4602827a6024e845eddd998ab11f5a712bd5a43252eb2a8c976"} Sep 30 01:42:50 crc kubenswrapper[4809]: I0930 01:42:50.778193 4809 generic.go:334] "Generic (PLEG): container finished" podID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerID="8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137" exitCode=0 Sep 30 01:42:50 crc kubenswrapper[4809]: I0930 01:42:50.778247 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqskm" event={"ID":"47ccb2b6-a03e-4d28-bab0-93234b6ce963","Type":"ContainerDied","Data":"8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137"} Sep 30 01:42:51 crc kubenswrapper[4809]: I0930 01:42:51.791803 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqskm" event={"ID":"47ccb2b6-a03e-4d28-bab0-93234b6ce963","Type":"ContainerStarted","Data":"442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f"} Sep 30 01:42:51 crc kubenswrapper[4809]: I0930 01:42:51.818627 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qqskm" podStartSLOduration=3.201500402 podStartE2EDuration="5.81860587s" podCreationTimestamp="2025-09-30 01:42:46 +0000 UTC" firstStartedPulling="2025-09-30 01:42:48.76009412 +0000 UTC m=+5619.796343528" lastFinishedPulling="2025-09-30 01:42:51.377199568 +0000 UTC m=+5622.413448996" observedRunningTime="2025-09-30 01:42:51.814705274 +0000 UTC m=+5622.850954692" watchObservedRunningTime="2025-09-30 01:42:51.81860587 +0000 UTC m=+5622.854855288" Sep 30 01:42:55 crc kubenswrapper[4809]: I0930 01:42:55.324472 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:42:55 crc kubenswrapper[4809]: I0930 01:42:55.324879 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:42:57 crc kubenswrapper[4809]: I0930 01:42:57.279394 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:57 crc kubenswrapper[4809]: I0930 01:42:57.280162 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:57 crc kubenswrapper[4809]: I0930 01:42:57.331241 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:57 crc kubenswrapper[4809]: I0930 01:42:57.946311 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:42:58 crc kubenswrapper[4809]: I0930 01:42:58.007867 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqskm"] Sep 30 01:42:59 crc kubenswrapper[4809]: I0930 01:42:59.903854 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qqskm" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="registry-server" containerID="cri-o://442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f" gracePeriod=2 Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.487177 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.583243 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-catalog-content\") pod \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.583300 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-utilities\") pod \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.583606 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95z7z\" (UniqueName: \"kubernetes.io/projected/47ccb2b6-a03e-4d28-bab0-93234b6ce963-kube-api-access-95z7z\") pod \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\" (UID: \"47ccb2b6-a03e-4d28-bab0-93234b6ce963\") " Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.586328 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-utilities" (OuterVolumeSpecName: "utilities") pod "47ccb2b6-a03e-4d28-bab0-93234b6ce963" (UID: "47ccb2b6-a03e-4d28-bab0-93234b6ce963"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.591439 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47ccb2b6-a03e-4d28-bab0-93234b6ce963-kube-api-access-95z7z" (OuterVolumeSpecName: "kube-api-access-95z7z") pod "47ccb2b6-a03e-4d28-bab0-93234b6ce963" (UID: "47ccb2b6-a03e-4d28-bab0-93234b6ce963"). InnerVolumeSpecName "kube-api-access-95z7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.603067 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47ccb2b6-a03e-4d28-bab0-93234b6ce963" (UID: "47ccb2b6-a03e-4d28-bab0-93234b6ce963"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.687019 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95z7z\" (UniqueName: \"kubernetes.io/projected/47ccb2b6-a03e-4d28-bab0-93234b6ce963-kube-api-access-95z7z\") on node \"crc\" DevicePath \"\"" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.687062 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.687076 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ccb2b6-a03e-4d28-bab0-93234b6ce963-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.921830 4809 generic.go:334] "Generic (PLEG): container finished" podID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerID="442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f" exitCode=0 Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.921912 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqskm" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.921941 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqskm" event={"ID":"47ccb2b6-a03e-4d28-bab0-93234b6ce963","Type":"ContainerDied","Data":"442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f"} Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.922396 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqskm" event={"ID":"47ccb2b6-a03e-4d28-bab0-93234b6ce963","Type":"ContainerDied","Data":"5290df5d955bd4602827a6024e845eddd998ab11f5a712bd5a43252eb2a8c976"} Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.922431 4809 scope.go:117] "RemoveContainer" containerID="442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.962718 4809 scope.go:117] "RemoveContainer" containerID="8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137" Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.988003 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqskm"] Sep 30 01:43:00 crc kubenswrapper[4809]: I0930 01:43:00.988500 4809 scope.go:117] "RemoveContainer" containerID="b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.008904 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqskm"] Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.079663 4809 scope.go:117] "RemoveContainer" containerID="442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f" Sep 30 01:43:01 crc kubenswrapper[4809]: E0930 01:43:01.080158 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f\": container with ID starting with 442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f not found: ID does not exist" containerID="442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.080193 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f"} err="failed to get container status \"442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f\": rpc error: code = NotFound desc = could not find container \"442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f\": container with ID starting with 442ffc406f261194874fcf3094fc6e78bbd08bc01d9a4ae9ea13c5cc981c820f not found: ID does not exist" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.080218 4809 scope.go:117] "RemoveContainer" containerID="8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137" Sep 30 01:43:01 crc kubenswrapper[4809]: E0930 01:43:01.080512 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137\": container with ID starting with 8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137 not found: ID does not exist" containerID="8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.080544 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137"} err="failed to get container status \"8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137\": rpc error: code = NotFound desc = could not find container \"8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137\": container with ID starting with 8741bb49ea5f3748a5ea30f6dac0b5245d77f5931f16d0ac158d091e9ad28137 not found: ID does not exist" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.080561 4809 scope.go:117] "RemoveContainer" containerID="b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6" Sep 30 01:43:01 crc kubenswrapper[4809]: E0930 01:43:01.081047 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6\": container with ID starting with b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6 not found: ID does not exist" containerID="b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.081074 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6"} err="failed to get container status \"b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6\": rpc error: code = NotFound desc = could not find container \"b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6\": container with ID starting with b27f3d05759750ec44ccaa0ba0f5944176696b209cce0e89a1c00fdc8dbc8fb6 not found: ID does not exist" Sep 30 01:43:01 crc kubenswrapper[4809]: I0930 01:43:01.718787 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" path="/var/lib/kubelet/pods/47ccb2b6-a03e-4d28-bab0-93234b6ce963/volumes" Sep 30 01:43:25 crc kubenswrapper[4809]: I0930 01:43:25.324810 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:43:25 crc kubenswrapper[4809]: I0930 01:43:25.325462 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.325417 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.326245 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 
01:43:55.326346 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.327900 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.327960 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" gracePeriod=600 Sep 30 01:43:55 crc kubenswrapper[4809]: E0930 01:43:55.469160 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.572104 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" exitCode=0 Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.572149 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21"} Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.572443 4809 scope.go:117] "RemoveContainer" containerID="89d4117d6494dc08f9753a0441ed5cdeca77976220539ebf1a8212fb6dcd7902" Sep 30 01:43:55 crc kubenswrapper[4809]: I0930 01:43:55.573163 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:43:55 crc kubenswrapper[4809]: E0930 01:43:55.573436 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:44:09 crc kubenswrapper[4809]: I0930 01:44:09.697708 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:44:09 crc kubenswrapper[4809]: E0930 01:44:09.698412 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:44:24 crc kubenswrapper[4809]: I0930 01:44:24.691529 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:44:24 crc kubenswrapper[4809]: E0930 01:44:24.693190 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:44:37 crc kubenswrapper[4809]: I0930 01:44:37.691177 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:44:37 crc kubenswrapper[4809]: E0930 01:44:37.692147 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:44:51 crc kubenswrapper[4809]: I0930 01:44:51.691485 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:44:51 crc kubenswrapper[4809]: E0930 01:44:51.692596 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.213846 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w"] Sep 30 01:45:00 crc kubenswrapper[4809]: E0930 01:45:00.214852 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="registry-server" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.214865 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="registry-server" Sep 30 01:45:00 crc kubenswrapper[4809]: E0930 01:45:00.214886 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="extract-utilities" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.214895 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="extract-utilities" Sep 30 01:45:00 crc kubenswrapper[4809]: E0930 01:45:00.214925 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="extract-content" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.214933 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="extract-content" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.215162 4809 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="47ccb2b6-a03e-4d28-bab0-93234b6ce963" containerName="registry-server" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.215958 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.221171 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.221389 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.228341 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/043d1b95-3e33-4acf-a8d9-b17644c28f28-config-volume\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.228485 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzskv\" (UniqueName: \"kubernetes.io/projected/043d1b95-3e33-4acf-a8d9-b17644c28f28-kube-api-access-jzskv\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.228889 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/043d1b95-3e33-4acf-a8d9-b17644c28f28-secret-volume\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.250826 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w"] Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.330707 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/043d1b95-3e33-4acf-a8d9-b17644c28f28-config-volume\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.330824 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzskv\" (UniqueName: \"kubernetes.io/projected/043d1b95-3e33-4acf-a8d9-b17644c28f28-kube-api-access-jzskv\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.330915 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/043d1b95-3e33-4acf-a8d9-b17644c28f28-secret-volume\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc 
kubenswrapper[4809]: I0930 01:45:00.331972 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/043d1b95-3e33-4acf-a8d9-b17644c28f28-config-volume\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.341494 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/043d1b95-3e33-4acf-a8d9-b17644c28f28-secret-volume\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.348608 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzskv\" (UniqueName: \"kubernetes.io/projected/043d1b95-3e33-4acf-a8d9-b17644c28f28-kube-api-access-jzskv\") pod \"collect-profiles-29319945-bnw6w\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:00 crc kubenswrapper[4809]: I0930 01:45:00.540490 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:01 crc kubenswrapper[4809]: I0930 01:45:01.176030 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w"] Sep 30 01:45:01 crc kubenswrapper[4809]: I0930 01:45:01.352965 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" event={"ID":"043d1b95-3e33-4acf-a8d9-b17644c28f28","Type":"ContainerStarted","Data":"7c45dba13823ffb6f6e0c94fcb6adc1c4a256e517d6db91fd3a765e96b4bf870"} Sep 30 01:45:02 crc kubenswrapper[4809]: I0930 01:45:02.362673 4809 generic.go:334] "Generic (PLEG): container finished" podID="043d1b95-3e33-4acf-a8d9-b17644c28f28" containerID="bd76e70599e1e48cde7aee2562646ff60bbf210c9450794040e719e37a2e20b7" exitCode=0 Sep 30 01:45:02 crc kubenswrapper[4809]: I0930 01:45:02.363229 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" event={"ID":"043d1b95-3e33-4acf-a8d9-b17644c28f28","Type":"ContainerDied","Data":"bd76e70599e1e48cde7aee2562646ff60bbf210c9450794040e719e37a2e20b7"} Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.264679 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.390979 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" event={"ID":"043d1b95-3e33-4acf-a8d9-b17644c28f28","Type":"ContainerDied","Data":"7c45dba13823ffb6f6e0c94fcb6adc1c4a256e517d6db91fd3a765e96b4bf870"} Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.391036 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c45dba13823ffb6f6e0c94fcb6adc1c4a256e517d6db91fd3a765e96b4bf870" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.391046 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.437918 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzskv\" (UniqueName: \"kubernetes.io/projected/043d1b95-3e33-4acf-a8d9-b17644c28f28-kube-api-access-jzskv\") pod \"043d1b95-3e33-4acf-a8d9-b17644c28f28\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.437981 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/043d1b95-3e33-4acf-a8d9-b17644c28f28-config-volume\") pod \"043d1b95-3e33-4acf-a8d9-b17644c28f28\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.438063 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/043d1b95-3e33-4acf-a8d9-b17644c28f28-secret-volume\") pod \"043d1b95-3e33-4acf-a8d9-b17644c28f28\" (UID: \"043d1b95-3e33-4acf-a8d9-b17644c28f28\") " Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.438487 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/043d1b95-3e33-4acf-a8d9-b17644c28f28-config-volume" (OuterVolumeSpecName: "config-volume") pod "043d1b95-3e33-4acf-a8d9-b17644c28f28" (UID: "043d1b95-3e33-4acf-a8d9-b17644c28f28"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.439095 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/043d1b95-3e33-4acf-a8d9-b17644c28f28-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.445852 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043d1b95-3e33-4acf-a8d9-b17644c28f28-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "043d1b95-3e33-4acf-a8d9-b17644c28f28" (UID: "043d1b95-3e33-4acf-a8d9-b17644c28f28"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.448578 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/043d1b95-3e33-4acf-a8d9-b17644c28f28-kube-api-access-jzskv" (OuterVolumeSpecName: "kube-api-access-jzskv") pod "043d1b95-3e33-4acf-a8d9-b17644c28f28" (UID: "043d1b95-3e33-4acf-a8d9-b17644c28f28"). InnerVolumeSpecName "kube-api-access-jzskv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.540969 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzskv\" (UniqueName: \"kubernetes.io/projected/043d1b95-3e33-4acf-a8d9-b17644c28f28-kube-api-access-jzskv\") on node \"crc\" DevicePath \"\"" Sep 30 01:45:04 crc kubenswrapper[4809]: I0930 01:45:04.541282 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/043d1b95-3e33-4acf-a8d9-b17644c28f28-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:45:05 crc kubenswrapper[4809]: I0930 01:45:05.352612 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r"] Sep 30 01:45:05 crc kubenswrapper[4809]: I0930 01:45:05.361950 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-85j5r"] Sep 30 01:45:05 crc kubenswrapper[4809]: I0930 01:45:05.690557 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:45:05 crc kubenswrapper[4809]: E0930 01:45:05.691212 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:45:05 crc kubenswrapper[4809]: I0930 01:45:05.707848 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fafdae5-fcc8-4f5f-8d7b-6df239e86be5" path="/var/lib/kubelet/pods/5fafdae5-fcc8-4f5f-8d7b-6df239e86be5/volumes" Sep 30 01:45:19 crc kubenswrapper[4809]: I0930 01:45:19.436139 4809 scope.go:117] "RemoveContainer" containerID="7dffcffae20f5546d1226bcd2fae51725deb6f04eb77fc89604bd970fb1da4b9" Sep 30 01:45:19 crc kubenswrapper[4809]: I0930 01:45:19.708696 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:45:19 crc kubenswrapper[4809]: E0930 01:45:19.709420 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:45:34 crc kubenswrapper[4809]: I0930 01:45:34.690936 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:45:34 crc kubenswrapper[4809]: E0930 01:45:34.692309 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:45:47 crc kubenswrapper[4809]: I0930 01:45:47.691942 4809 scope.go:117] "RemoveContainer" 
containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:45:47 crc kubenswrapper[4809]: E0930 01:45:47.692946 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:46:01 crc kubenswrapper[4809]: I0930 01:46:01.691283 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:46:01 crc kubenswrapper[4809]: E0930 01:46:01.692576 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:46:13 crc kubenswrapper[4809]: I0930 01:46:13.691341 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:46:13 crc kubenswrapper[4809]: E0930 01:46:13.692218 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:46:24 crc kubenswrapper[4809]: I0930 01:46:24.690820 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:46:24 crc kubenswrapper[4809]: E0930 01:46:24.691670 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:46:37 crc kubenswrapper[4809]: I0930 01:46:37.692207 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:46:37 crc kubenswrapper[4809]: E0930 01:46:37.693587 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:46:49 crc kubenswrapper[4809]: I0930 01:46:49.698995 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:46:49 crc kubenswrapper[4809]: E0930 01:46:49.700098 4809 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:47:01 crc kubenswrapper[4809]: I0930 01:47:01.692464 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:47:01 crc kubenswrapper[4809]: E0930 01:47:01.695411 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:47:14 crc kubenswrapper[4809]: I0930 01:47:14.691530 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:47:14 crc kubenswrapper[4809]: E0930 01:47:14.695171 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:47:29 crc kubenswrapper[4809]: I0930 01:47:29.707266 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:47:29 crc kubenswrapper[4809]: E0930 01:47:29.708729 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:47:40 crc kubenswrapper[4809]: I0930 01:47:40.706265 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:47:40 crc kubenswrapper[4809]: E0930 01:47:40.707273 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:47:55 crc kubenswrapper[4809]: I0930 01:47:55.692132 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:47:55 crc kubenswrapper[4809]: E0930 01:47:55.693021 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:48:10 crc kubenswrapper[4809]: I0930 01:48:10.691156 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:48:10 crc kubenswrapper[4809]: E0930 01:48:10.691900 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:48:25 crc kubenswrapper[4809]: I0930 01:48:25.691264 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:48:25 crc kubenswrapper[4809]: E0930 01:48:25.692273 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:48:39 crc kubenswrapper[4809]: I0930 01:48:39.712170 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:48:39 crc kubenswrapper[4809]: E0930 01:48:39.713418 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:48:46 crc kubenswrapper[4809]: I0930 01:48:46.995443 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2lgp2"] Sep 30 01:48:46 crc kubenswrapper[4809]: E0930 01:48:46.996893 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="043d1b95-3e33-4acf-a8d9-b17644c28f28" containerName="collect-profiles" Sep 30 01:48:46 crc kubenswrapper[4809]: I0930 01:48:46.996911 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="043d1b95-3e33-4acf-a8d9-b17644c28f28" containerName="collect-profiles" Sep 30 01:48:46 crc kubenswrapper[4809]: I0930 01:48:46.997178 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="043d1b95-3e33-4acf-a8d9-b17644c28f28" containerName="collect-profiles" Sep 30 01:48:46 crc kubenswrapper[4809]: I0930 01:48:46.999291 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.044363 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2lgp2"] Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.056272 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-utilities\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.056396 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-catalog-content\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.056460 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lbd8\" (UniqueName: \"kubernetes.io/projected/4d559594-55e7-49f0-8be3-e81319cd5cd1-kube-api-access-6lbd8\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.158754 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-utilities\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.158838 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-catalog-content\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.158884 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lbd8\" (UniqueName: \"kubernetes.io/projected/4d559594-55e7-49f0-8be3-e81319cd5cd1-kube-api-access-6lbd8\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.159502 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-utilities\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.159513 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-catalog-content\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.185723 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6lbd8\" (UniqueName: \"kubernetes.io/projected/4d559594-55e7-49f0-8be3-e81319cd5cd1-kube-api-access-6lbd8\") pod \"certified-operators-2lgp2\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.366045 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:47 crc kubenswrapper[4809]: I0930 01:48:47.901131 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2lgp2"] Sep 30 01:48:48 crc kubenswrapper[4809]: I0930 01:48:48.119028 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerStarted","Data":"9da6a66bc3df091278aa7d6c50d0397df84963bd6c7bef5c9110fb5df4ab1b19"} Sep 30 01:48:49 crc kubenswrapper[4809]: I0930 01:48:49.133463 4809 generic.go:334] "Generic (PLEG): container finished" podID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerID="0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af" exitCode=0 Sep 30 01:48:49 crc kubenswrapper[4809]: I0930 01:48:49.133529 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerDied","Data":"0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af"} Sep 30 01:48:49 crc kubenswrapper[4809]: I0930 01:48:49.137136 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:48:50 crc kubenswrapper[4809]: I0930 01:48:50.143625 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerStarted","Data":"17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713"} Sep 30 01:48:52 crc kubenswrapper[4809]: I0930 01:48:52.166627 4809 generic.go:334] "Generic (PLEG): container finished" podID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerID="17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713" exitCode=0 Sep 30 01:48:52 crc kubenswrapper[4809]: I0930 01:48:52.166670 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerDied","Data":"17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713"} Sep 30 01:48:53 crc kubenswrapper[4809]: I0930 01:48:53.179421 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerStarted","Data":"8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571"} Sep 30 01:48:53 crc kubenswrapper[4809]: I0930 01:48:53.205256 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2lgp2" podStartSLOduration=3.607965446 podStartE2EDuration="7.205211123s" podCreationTimestamp="2025-09-30 01:48:46 +0000 UTC" firstStartedPulling="2025-09-30 01:48:49.136609595 +0000 UTC m=+5980.172859013" lastFinishedPulling="2025-09-30 01:48:52.733855242 +0000 UTC m=+5983.770104690" observedRunningTime="2025-09-30 01:48:53.203262289 +0000 UTC m=+5984.239511737" watchObservedRunningTime="2025-09-30 
01:48:53.205211123 +0000 UTC m=+5984.241460541" Sep 30 01:48:54 crc kubenswrapper[4809]: I0930 01:48:54.690936 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:48:54 crc kubenswrapper[4809]: E0930 01:48:54.691596 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:48:57 crc kubenswrapper[4809]: I0930 01:48:57.366590 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:57 crc kubenswrapper[4809]: I0930 01:48:57.367476 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:57 crc kubenswrapper[4809]: I0930 01:48:57.440374 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:58 crc kubenswrapper[4809]: I0930 01:48:58.355667 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:48:58 crc kubenswrapper[4809]: I0930 01:48:58.422301 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2lgp2"] Sep 30 01:49:00 crc kubenswrapper[4809]: I0930 01:49:00.343881 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2lgp2" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="registry-server" containerID="cri-o://8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571" gracePeriod=2 Sep 30 01:49:00 crc kubenswrapper[4809]: I0930 01:49:00.905348 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.018514 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lbd8\" (UniqueName: \"kubernetes.io/projected/4d559594-55e7-49f0-8be3-e81319cd5cd1-kube-api-access-6lbd8\") pod \"4d559594-55e7-49f0-8be3-e81319cd5cd1\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.018625 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-catalog-content\") pod \"4d559594-55e7-49f0-8be3-e81319cd5cd1\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.018760 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-utilities\") pod \"4d559594-55e7-49f0-8be3-e81319cd5cd1\" (UID: \"4d559594-55e7-49f0-8be3-e81319cd5cd1\") " Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.035630 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-utilities" (OuterVolumeSpecName: "utilities") pod "4d559594-55e7-49f0-8be3-e81319cd5cd1" (UID: "4d559594-55e7-49f0-8be3-e81319cd5cd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.042969 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d559594-55e7-49f0-8be3-e81319cd5cd1-kube-api-access-6lbd8" (OuterVolumeSpecName: "kube-api-access-6lbd8") pod "4d559594-55e7-49f0-8be3-e81319cd5cd1" (UID: "4d559594-55e7-49f0-8be3-e81319cd5cd1"). InnerVolumeSpecName "kube-api-access-6lbd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.069124 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d559594-55e7-49f0-8be3-e81319cd5cd1" (UID: "4d559594-55e7-49f0-8be3-e81319cd5cd1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.122207 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.122551 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lbd8\" (UniqueName: \"kubernetes.io/projected/4d559594-55e7-49f0-8be3-e81319cd5cd1-kube-api-access-6lbd8\") on node \"crc\" DevicePath \"\"" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.122563 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d559594-55e7-49f0-8be3-e81319cd5cd1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.357451 4809 generic.go:334] "Generic (PLEG): container finished" podID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerID="8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571" exitCode=0 Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.357527 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerDied","Data":"8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571"} Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.357555 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lgp2" event={"ID":"4d559594-55e7-49f0-8be3-e81319cd5cd1","Type":"ContainerDied","Data":"9da6a66bc3df091278aa7d6c50d0397df84963bd6c7bef5c9110fb5df4ab1b19"} Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.357557 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2lgp2" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.357576 4809 scope.go:117] "RemoveContainer" containerID="8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.398945 4809 scope.go:117] "RemoveContainer" containerID="17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.410508 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2lgp2"] Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.434668 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2lgp2"] Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.438712 4809 scope.go:117] "RemoveContainer" containerID="0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.496505 4809 scope.go:117] "RemoveContainer" containerID="8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571" Sep 30 01:49:01 crc kubenswrapper[4809]: E0930 01:49:01.497215 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571\": container with ID starting with 8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571 not found: ID does not exist" containerID="8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.497255 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571"} err="failed to get container status \"8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571\": rpc error: code = NotFound desc = could not find container \"8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571\": container with ID starting with 8f485d679d9e55c0f561ed7e24472a2043ad3a30b53696eaf98f65b0473c0571 not found: ID does not exist" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.497301 4809 scope.go:117] "RemoveContainer" containerID="17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713" Sep 30 01:49:01 crc kubenswrapper[4809]: E0930 01:49:01.499488 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713\": container with ID starting with 17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713 not found: ID does not exist" containerID="17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.499773 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713"} err="failed to get container status \"17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713\": rpc error: code = NotFound desc = could not find container \"17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713\": container with ID starting with 17df577078578212bbe2b55b090d7821da7b4716a70526e311883986ab26f713 not found: ID does not exist" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.499963 4809 scope.go:117] "RemoveContainer" 
containerID="0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af" Sep 30 01:49:01 crc kubenswrapper[4809]: E0930 01:49:01.500666 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af\": container with ID starting with 0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af not found: ID does not exist" containerID="0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.500700 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af"} err="failed to get container status \"0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af\": rpc error: code = NotFound desc = could not find container \"0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af\": container with ID starting with 0cd0d0f54f1f27a84e93b0c29284b843df93cab98968efccafc25730f76230af not found: ID does not exist" Sep 30 01:49:01 crc kubenswrapper[4809]: I0930 01:49:01.712605 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" path="/var/lib/kubelet/pods/4d559594-55e7-49f0-8be3-e81319cd5cd1/volumes" Sep 30 01:49:09 crc kubenswrapper[4809]: I0930 01:49:09.707644 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:49:10 crc kubenswrapper[4809]: I0930 01:49:10.480122 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"bced84f5dab0f31b68f51c4794d1a1924b4390fb6ef61c6ae88443c3c03aeee8"} Sep 30 01:49:12 crc kubenswrapper[4809]: E0930 01:49:12.795604 4809 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.24:53698->38.129.56.24:38341: write tcp 38.129.56.24:53698->38.129.56.24:38341: write: broken pipe Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.782152 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bgsxn"] Sep 30 01:49:34 crc kubenswrapper[4809]: E0930 01:49:34.783259 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="registry-server" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.783276 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="registry-server" Sep 30 01:49:34 crc kubenswrapper[4809]: E0930 01:49:34.783292 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="extract-utilities" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.783301 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="extract-utilities" Sep 30 01:49:34 crc kubenswrapper[4809]: E0930 01:49:34.783339 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="extract-content" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.783347 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="extract-content" Sep 30 01:49:34 crc 
kubenswrapper[4809]: I0930 01:49:34.783692 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d559594-55e7-49f0-8be3-e81319cd5cd1" containerName="registry-server" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.785762 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.823093 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bgsxn"] Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.935783 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49fw6\" (UniqueName: \"kubernetes.io/projected/aaa044de-be5d-46a2-b4c8-91970250a5eb-kube-api-access-49fw6\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.936634 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-utilities\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:34 crc kubenswrapper[4809]: I0930 01:49:34.936990 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-catalog-content\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.039203 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-catalog-content\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.039368 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49fw6\" (UniqueName: \"kubernetes.io/projected/aaa044de-be5d-46a2-b4c8-91970250a5eb-kube-api-access-49fw6\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.039410 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-utilities\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.039911 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-catalog-content\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.039926 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-utilities\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.068253 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49fw6\" (UniqueName: \"kubernetes.io/projected/aaa044de-be5d-46a2-b4c8-91970250a5eb-kube-api-access-49fw6\") pod \"community-operators-bgsxn\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.107190 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.707129 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bgsxn"] Sep 30 01:49:35 crc kubenswrapper[4809]: I0930 01:49:35.758928 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerStarted","Data":"6bb0b0f24b4d5c471c473375270b91d3c3c4c4d7bd4ef0e905ce40e9c4863fe7"} Sep 30 01:49:36 crc kubenswrapper[4809]: I0930 01:49:36.801339 4809 generic.go:334] "Generic (PLEG): container finished" podID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerID="772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61" exitCode=0 Sep 30 01:49:36 crc kubenswrapper[4809]: I0930 01:49:36.801389 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerDied","Data":"772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61"} Sep 30 01:49:37 crc kubenswrapper[4809]: I0930 01:49:37.813132 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerStarted","Data":"25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468"} Sep 30 01:49:38 crc kubenswrapper[4809]: I0930 01:49:38.835259 4809 generic.go:334] "Generic (PLEG): container finished" podID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerID="25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468" exitCode=0 Sep 30 01:49:38 crc kubenswrapper[4809]: I0930 01:49:38.835318 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerDied","Data":"25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468"} Sep 30 01:49:39 crc kubenswrapper[4809]: I0930 01:49:39.872418 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerStarted","Data":"9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12"} Sep 30 01:49:39 crc kubenswrapper[4809]: I0930 01:49:39.900693 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bgsxn" podStartSLOduration=3.314351901 podStartE2EDuration="5.900669157s" podCreationTimestamp="2025-09-30 01:49:34 +0000 UTC" firstStartedPulling="2025-09-30 01:49:36.803608846 +0000 UTC m=+6027.839858254" lastFinishedPulling="2025-09-30 01:49:39.389926062 +0000 
UTC m=+6030.426175510" observedRunningTime="2025-09-30 01:49:39.897209503 +0000 UTC m=+6030.933458921" watchObservedRunningTime="2025-09-30 01:49:39.900669157 +0000 UTC m=+6030.936918575" Sep 30 01:49:45 crc kubenswrapper[4809]: I0930 01:49:45.107625 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:45 crc kubenswrapper[4809]: I0930 01:49:45.108210 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:45 crc kubenswrapper[4809]: I0930 01:49:45.184464 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:46 crc kubenswrapper[4809]: I0930 01:49:46.014376 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:46 crc kubenswrapper[4809]: I0930 01:49:46.082666 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bgsxn"] Sep 30 01:49:47 crc kubenswrapper[4809]: I0930 01:49:47.958748 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bgsxn" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="registry-server" containerID="cri-o://9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12" gracePeriod=2 Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.592076 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.789219 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49fw6\" (UniqueName: \"kubernetes.io/projected/aaa044de-be5d-46a2-b4c8-91970250a5eb-kube-api-access-49fw6\") pod \"aaa044de-be5d-46a2-b4c8-91970250a5eb\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.789496 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-utilities\") pod \"aaa044de-be5d-46a2-b4c8-91970250a5eb\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.789594 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-catalog-content\") pod \"aaa044de-be5d-46a2-b4c8-91970250a5eb\" (UID: \"aaa044de-be5d-46a2-b4c8-91970250a5eb\") " Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.791032 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-utilities" (OuterVolumeSpecName: "utilities") pod "aaa044de-be5d-46a2-b4c8-91970250a5eb" (UID: "aaa044de-be5d-46a2-b4c8-91970250a5eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.798814 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaa044de-be5d-46a2-b4c8-91970250a5eb-kube-api-access-49fw6" (OuterVolumeSpecName: "kube-api-access-49fw6") pod "aaa044de-be5d-46a2-b4c8-91970250a5eb" (UID: "aaa044de-be5d-46a2-b4c8-91970250a5eb"). 
InnerVolumeSpecName "kube-api-access-49fw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.853497 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aaa044de-be5d-46a2-b4c8-91970250a5eb" (UID: "aaa044de-be5d-46a2-b4c8-91970250a5eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.893365 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49fw6\" (UniqueName: \"kubernetes.io/projected/aaa044de-be5d-46a2-b4c8-91970250a5eb-kube-api-access-49fw6\") on node \"crc\" DevicePath \"\"" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.893395 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.893404 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaa044de-be5d-46a2-b4c8-91970250a5eb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.977844 4809 generic.go:334] "Generic (PLEG): container finished" podID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerID="9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12" exitCode=0 Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.977907 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerDied","Data":"9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12"} Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.977947 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgsxn" event={"ID":"aaa044de-be5d-46a2-b4c8-91970250a5eb","Type":"ContainerDied","Data":"6bb0b0f24b4d5c471c473375270b91d3c3c4c4d7bd4ef0e905ce40e9c4863fe7"} Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.977977 4809 scope.go:117] "RemoveContainer" containerID="9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12" Sep 30 01:49:48 crc kubenswrapper[4809]: I0930 01:49:48.978185 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bgsxn" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.040174 4809 scope.go:117] "RemoveContainer" containerID="25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.066166 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bgsxn"] Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.086601 4809 scope.go:117] "RemoveContainer" containerID="772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.090981 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bgsxn"] Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.143581 4809 scope.go:117] "RemoveContainer" containerID="9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12" Sep 30 01:49:49 crc kubenswrapper[4809]: E0930 01:49:49.144331 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12\": container with ID starting with 9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12 not found: ID does not exist" containerID="9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.144385 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12"} err="failed to get container status \"9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12\": rpc error: code = NotFound desc = could not find container \"9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12\": container with ID starting with 9c17fe12d38bf0ff068bb516b871fb321b90b6955b00cd72ad18505db291de12 not found: ID does not exist" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.144410 4809 scope.go:117] "RemoveContainer" containerID="25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468" Sep 30 01:49:49 crc kubenswrapper[4809]: E0930 01:49:49.145072 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468\": container with ID starting with 25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468 not found: ID does not exist" containerID="25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.145111 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468"} err="failed to get container status \"25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468\": rpc error: code = NotFound desc = could not find container \"25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468\": container with ID starting with 25351f50d41ad58510fceffe0904b61e0fdf9f6f25d32ad167f7d937abb04468 not found: ID does not exist" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.145152 4809 scope.go:117] "RemoveContainer" containerID="772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61" Sep 30 01:49:49 crc kubenswrapper[4809]: E0930 01:49:49.145674 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61\": container with ID starting with 772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61 not found: ID does not exist" containerID="772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.145720 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61"} err="failed to get container status \"772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61\": rpc error: code = NotFound desc = could not find container \"772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61\": container with ID starting with 772e21445d06c82aae6b2ae35c35ee63fa279aa9fc6b30b3010e180fc8134f61 not found: ID does not exist" Sep 30 01:49:49 crc kubenswrapper[4809]: I0930 01:49:49.713378 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" path="/var/lib/kubelet/pods/aaa044de-be5d-46a2-b4c8-91970250a5eb/volumes" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.374389 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x7qq7"] Sep 30 01:50:36 crc kubenswrapper[4809]: E0930 01:50:36.375891 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="extract-utilities" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.375916 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="extract-utilities" Sep 30 01:50:36 crc kubenswrapper[4809]: E0930 01:50:36.375942 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="extract-content" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.375953 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="extract-content" Sep 30 01:50:36 crc kubenswrapper[4809]: E0930 01:50:36.375996 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="registry-server" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.376008 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="registry-server" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.376303 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaa044de-be5d-46a2-b4c8-91970250a5eb" containerName="registry-server" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.378450 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.393830 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7qq7"] Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.501888 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-utilities\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.502021 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp8sc\" (UniqueName: \"kubernetes.io/projected/beb039f5-4ad1-49b5-8320-2fbc1e027846-kube-api-access-wp8sc\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.502102 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-catalog-content\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.603666 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-catalog-content\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.603799 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-utilities\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.603883 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp8sc\" (UniqueName: \"kubernetes.io/projected/beb039f5-4ad1-49b5-8320-2fbc1e027846-kube-api-access-wp8sc\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.604257 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-catalog-content\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.604269 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-utilities\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.654295 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wp8sc\" (UniqueName: \"kubernetes.io/projected/beb039f5-4ad1-49b5-8320-2fbc1e027846-kube-api-access-wp8sc\") pod \"redhat-operators-x7qq7\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:36 crc kubenswrapper[4809]: I0930 01:50:36.719060 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:37 crc kubenswrapper[4809]: I0930 01:50:37.244071 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7qq7"] Sep 30 01:50:37 crc kubenswrapper[4809]: I0930 01:50:37.533312 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerStarted","Data":"7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f"} Sep 30 01:50:37 crc kubenswrapper[4809]: I0930 01:50:37.533632 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerStarted","Data":"abdd925ad4bcb4108ab7318b144b02a436af375b97f873677dd1e1210ca96fed"} Sep 30 01:50:38 crc kubenswrapper[4809]: I0930 01:50:38.550007 4809 generic.go:334] "Generic (PLEG): container finished" podID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerID="7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f" exitCode=0 Sep 30 01:50:38 crc kubenswrapper[4809]: I0930 01:50:38.550073 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerDied","Data":"7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f"} Sep 30 01:50:40 crc kubenswrapper[4809]: I0930 01:50:40.574766 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerStarted","Data":"3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3"} Sep 30 01:50:43 crc kubenswrapper[4809]: I0930 01:50:43.543024 4809 patch_prober.go:28] interesting pod/logging-loki-gateway-575dfb8665-dnzjd container/gateway namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body={ Sep 30 01:50:43 crc kubenswrapper[4809]: "http": "returned status 503, expected 200" Sep 30 01:50:43 crc kubenswrapper[4809]: } Sep 30 01:50:43 crc kubenswrapper[4809]: I0930 01:50:43.543764 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-gateway-575dfb8665-dnzjd" podUID="694ba803-44df-4e1d-9236-c84411352efe" containerName="gateway" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 01:50:43 crc kubenswrapper[4809]: I0930 01:50:43.614770 4809 generic.go:334] "Generic (PLEG): container finished" podID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerID="3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3" exitCode=0 Sep 30 01:50:43 crc kubenswrapper[4809]: I0930 01:50:43.614845 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerDied","Data":"3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3"} Sep 30 01:50:44 crc kubenswrapper[4809]: I0930 01:50:44.640824 4809 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerStarted","Data":"3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00"} Sep 30 01:50:44 crc kubenswrapper[4809]: I0930 01:50:44.676575 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x7qq7" podStartSLOduration=3.144261342 podStartE2EDuration="8.676542596s" podCreationTimestamp="2025-09-30 01:50:36 +0000 UTC" firstStartedPulling="2025-09-30 01:50:38.552203559 +0000 UTC m=+6089.588452977" lastFinishedPulling="2025-09-30 01:50:44.084484803 +0000 UTC m=+6095.120734231" observedRunningTime="2025-09-30 01:50:44.663220552 +0000 UTC m=+6095.699469970" watchObservedRunningTime="2025-09-30 01:50:44.676542596 +0000 UTC m=+6095.712792024" Sep 30 01:50:46 crc kubenswrapper[4809]: I0930 01:50:46.719974 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:46 crc kubenswrapper[4809]: I0930 01:50:46.720360 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:47 crc kubenswrapper[4809]: I0930 01:50:47.780419 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x7qq7" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="registry-server" probeResult="failure" output=< Sep 30 01:50:47 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 01:50:47 crc kubenswrapper[4809]: > Sep 30 01:50:56 crc kubenswrapper[4809]: I0930 01:50:56.766944 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:56 crc kubenswrapper[4809]: I0930 01:50:56.816040 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:57 crc kubenswrapper[4809]: I0930 01:50:57.004924 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7qq7"] Sep 30 01:50:57 crc kubenswrapper[4809]: I0930 01:50:57.798098 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x7qq7" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="registry-server" containerID="cri-o://3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00" gracePeriod=2 Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.344350 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.405761 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-catalog-content\") pod \"beb039f5-4ad1-49b5-8320-2fbc1e027846\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.405878 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp8sc\" (UniqueName: \"kubernetes.io/projected/beb039f5-4ad1-49b5-8320-2fbc1e027846-kube-api-access-wp8sc\") pod \"beb039f5-4ad1-49b5-8320-2fbc1e027846\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.405919 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-utilities\") pod \"beb039f5-4ad1-49b5-8320-2fbc1e027846\" (UID: \"beb039f5-4ad1-49b5-8320-2fbc1e027846\") " Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.406926 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-utilities" (OuterVolumeSpecName: "utilities") pod "beb039f5-4ad1-49b5-8320-2fbc1e027846" (UID: "beb039f5-4ad1-49b5-8320-2fbc1e027846"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.416138 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/beb039f5-4ad1-49b5-8320-2fbc1e027846-kube-api-access-wp8sc" (OuterVolumeSpecName: "kube-api-access-wp8sc") pod "beb039f5-4ad1-49b5-8320-2fbc1e027846" (UID: "beb039f5-4ad1-49b5-8320-2fbc1e027846"). InnerVolumeSpecName "kube-api-access-wp8sc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.500899 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "beb039f5-4ad1-49b5-8320-2fbc1e027846" (UID: "beb039f5-4ad1-49b5-8320-2fbc1e027846"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.508998 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.509072 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp8sc\" (UniqueName: \"kubernetes.io/projected/beb039f5-4ad1-49b5-8320-2fbc1e027846-kube-api-access-wp8sc\") on node \"crc\" DevicePath \"\"" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.509091 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beb039f5-4ad1-49b5-8320-2fbc1e027846-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.815168 4809 generic.go:334] "Generic (PLEG): container finished" podID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerID="3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00" exitCode=0 Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.815226 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7qq7" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.815209 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerDied","Data":"3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00"} Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.816610 4809 scope.go:117] "RemoveContainer" containerID="3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.816736 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7qq7" event={"ID":"beb039f5-4ad1-49b5-8320-2fbc1e027846","Type":"ContainerDied","Data":"abdd925ad4bcb4108ab7318b144b02a436af375b97f873677dd1e1210ca96fed"} Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.847314 4809 scope.go:117] "RemoveContainer" containerID="3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.852616 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7qq7"] Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.863255 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x7qq7"] Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.868414 4809 scope.go:117] "RemoveContainer" containerID="7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.925505 4809 scope.go:117] "RemoveContainer" containerID="3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00" Sep 30 01:50:58 crc kubenswrapper[4809]: E0930 01:50:58.925939 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00\": container with ID starting with 3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00 not found: ID does not exist" containerID="3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.925978 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00"} err="failed to get container status \"3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00\": rpc error: code = NotFound desc = could not find container \"3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00\": container with ID starting with 3425cd35dc3422901407d74f87c5119c96386eb7f13b778e8f79c73c62119e00 not found: ID does not exist" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.926001 4809 scope.go:117] "RemoveContainer" containerID="3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3" Sep 30 01:50:58 crc kubenswrapper[4809]: E0930 01:50:58.926235 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3\": container with ID starting with 3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3 not found: ID does not exist" containerID="3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.926261 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3"} err="failed to get container status \"3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3\": rpc error: code = NotFound desc = could not find container \"3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3\": container with ID starting with 3411d46d8965723397fd4858faddbbaa753814a42c8e1c0dbd2914dfe629e3c3 not found: ID does not exist" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.926275 4809 scope.go:117] "RemoveContainer" containerID="7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f" Sep 30 01:50:58 crc kubenswrapper[4809]: E0930 01:50:58.926584 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f\": container with ID starting with 7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f not found: ID does not exist" containerID="7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f" Sep 30 01:50:58 crc kubenswrapper[4809]: I0930 01:50:58.926636 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f"} err="failed to get container status \"7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f\": rpc error: code = NotFound desc = could not find container \"7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f\": container with ID starting with 7c09bfa30194da2e3b4b4a99576d556f27e605208de2558f4620ff72be46481f not found: ID does not exist" Sep 30 01:50:59 crc kubenswrapper[4809]: I0930 01:50:59.709710 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" path="/var/lib/kubelet/pods/beb039f5-4ad1-49b5-8320-2fbc1e027846/volumes" Sep 30 01:51:25 crc kubenswrapper[4809]: I0930 01:51:25.325215 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:51:25 crc kubenswrapper[4809]: I0930 01:51:25.325963 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:51:55 crc kubenswrapper[4809]: I0930 01:51:55.324487 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:51:55 crc kubenswrapper[4809]: I0930 01:51:55.325208 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.325102 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.327040 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.327131 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.328525 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bced84f5dab0f31b68f51c4794d1a1924b4390fb6ef61c6ae88443c3c03aeee8"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.328698 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://bced84f5dab0f31b68f51c4794d1a1924b4390fb6ef61c6ae88443c3c03aeee8" gracePeriod=600 Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.945979 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="bced84f5dab0f31b68f51c4794d1a1924b4390fb6ef61c6ae88443c3c03aeee8" exitCode=0 Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.946016 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"bced84f5dab0f31b68f51c4794d1a1924b4390fb6ef61c6ae88443c3c03aeee8"} Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.946502 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652"} Sep 30 01:52:25 crc kubenswrapper[4809]: I0930 01:52:25.946533 4809 scope.go:117] "RemoveContainer" containerID="53ffe11219298f7b5a8736e0a331a40ccb9d8ef1453215ee89a9941ee2f1fb21" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.558385 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-psjmh"] Sep 30 01:52:57 crc kubenswrapper[4809]: E0930 01:52:57.559713 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="registry-server" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.559741 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="registry-server" Sep 30 01:52:57 crc kubenswrapper[4809]: E0930 01:52:57.559786 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="extract-content" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.559794 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="extract-content" Sep 30 01:52:57 crc kubenswrapper[4809]: E0930 01:52:57.559813 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="extract-utilities" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.559821 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="extract-utilities" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.560102 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="beb039f5-4ad1-49b5-8320-2fbc1e027846" containerName="registry-server" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.563007 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.580393 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-psjmh"] Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.701996 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-utilities\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.702071 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc2rq\" (UniqueName: \"kubernetes.io/projected/b4527013-ea3c-42f3-8150-68aa8532d049-kube-api-access-qc2rq\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.702135 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-catalog-content\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.803935 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-utilities\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.804052 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc2rq\" (UniqueName: \"kubernetes.io/projected/b4527013-ea3c-42f3-8150-68aa8532d049-kube-api-access-qc2rq\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.804105 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-catalog-content\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.805258 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-utilities\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.805334 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-catalog-content\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.843881 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qc2rq\" (UniqueName: \"kubernetes.io/projected/b4527013-ea3c-42f3-8150-68aa8532d049-kube-api-access-qc2rq\") pod \"redhat-marketplace-psjmh\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:57 crc kubenswrapper[4809]: I0930 01:52:57.889977 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:52:58 crc kubenswrapper[4809]: I0930 01:52:58.395909 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-psjmh"] Sep 30 01:52:59 crc kubenswrapper[4809]: I0930 01:52:59.376892 4809 generic.go:334] "Generic (PLEG): container finished" podID="b4527013-ea3c-42f3-8150-68aa8532d049" containerID="6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372" exitCode=0 Sep 30 01:52:59 crc kubenswrapper[4809]: I0930 01:52:59.376992 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerDied","Data":"6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372"} Sep 30 01:52:59 crc kubenswrapper[4809]: I0930 01:52:59.377186 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerStarted","Data":"1d17f314e17062e7864dc7a8fd2b4268f5eb15431176b8b484118ddc57507e81"} Sep 30 01:53:00 crc kubenswrapper[4809]: I0930 01:53:00.389829 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerStarted","Data":"c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb"} Sep 30 01:53:01 crc kubenswrapper[4809]: I0930 01:53:01.399181 4809 generic.go:334] "Generic (PLEG): container finished" podID="b4527013-ea3c-42f3-8150-68aa8532d049" containerID="c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb" exitCode=0 Sep 30 01:53:01 crc kubenswrapper[4809]: I0930 01:53:01.399225 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerDied","Data":"c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb"} Sep 30 01:53:02 crc kubenswrapper[4809]: I0930 01:53:02.410905 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerStarted","Data":"7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438"} Sep 30 01:53:02 crc kubenswrapper[4809]: I0930 01:53:02.429590 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-psjmh" podStartSLOduration=2.958641341 podStartE2EDuration="5.42957131s" podCreationTimestamp="2025-09-30 01:52:57 +0000 UTC" firstStartedPulling="2025-09-30 01:52:59.379954342 +0000 UTC m=+6230.416203760" lastFinishedPulling="2025-09-30 01:53:01.850884311 +0000 UTC m=+6232.887133729" observedRunningTime="2025-09-30 01:53:02.425371725 +0000 UTC m=+6233.461621153" watchObservedRunningTime="2025-09-30 01:53:02.42957131 +0000 UTC m=+6233.465820718" Sep 30 01:53:07 crc kubenswrapper[4809]: I0930 01:53:07.891095 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:53:07 crc kubenswrapper[4809]: I0930 01:53:07.891706 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:53:07 crc kubenswrapper[4809]: I0930 01:53:07.949030 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:53:08 crc kubenswrapper[4809]: I0930 01:53:08.570238 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:53:08 crc kubenswrapper[4809]: I0930 01:53:08.626351 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-psjmh"] Sep 30 01:53:10 crc kubenswrapper[4809]: I0930 01:53:10.529361 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-psjmh" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="registry-server" containerID="cri-o://7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438" gracePeriod=2 Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.097062 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.234513 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-utilities\") pod \"b4527013-ea3c-42f3-8150-68aa8532d049\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.234612 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-catalog-content\") pod \"b4527013-ea3c-42f3-8150-68aa8532d049\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.234823 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc2rq\" (UniqueName: \"kubernetes.io/projected/b4527013-ea3c-42f3-8150-68aa8532d049-kube-api-access-qc2rq\") pod \"b4527013-ea3c-42f3-8150-68aa8532d049\" (UID: \"b4527013-ea3c-42f3-8150-68aa8532d049\") " Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.236447 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-utilities" (OuterVolumeSpecName: "utilities") pod "b4527013-ea3c-42f3-8150-68aa8532d049" (UID: "b4527013-ea3c-42f3-8150-68aa8532d049"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.240385 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4527013-ea3c-42f3-8150-68aa8532d049-kube-api-access-qc2rq" (OuterVolumeSpecName: "kube-api-access-qc2rq") pod "b4527013-ea3c-42f3-8150-68aa8532d049" (UID: "b4527013-ea3c-42f3-8150-68aa8532d049"). InnerVolumeSpecName "kube-api-access-qc2rq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.253270 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4527013-ea3c-42f3-8150-68aa8532d049" (UID: "b4527013-ea3c-42f3-8150-68aa8532d049"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.336959 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc2rq\" (UniqueName: \"kubernetes.io/projected/b4527013-ea3c-42f3-8150-68aa8532d049-kube-api-access-qc2rq\") on node \"crc\" DevicePath \"\"" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.336994 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.337005 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4527013-ea3c-42f3-8150-68aa8532d049-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.540090 4809 generic.go:334] "Generic (PLEG): container finished" podID="b4527013-ea3c-42f3-8150-68aa8532d049" containerID="7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438" exitCode=0 Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.540144 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerDied","Data":"7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438"} Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.540177 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-psjmh" event={"ID":"b4527013-ea3c-42f3-8150-68aa8532d049","Type":"ContainerDied","Data":"1d17f314e17062e7864dc7a8fd2b4268f5eb15431176b8b484118ddc57507e81"} Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.540197 4809 scope.go:117] "RemoveContainer" containerID="7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.540366 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-psjmh" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.566845 4809 scope.go:117] "RemoveContainer" containerID="c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.606603 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-psjmh"] Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.621489 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-psjmh"] Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.626911 4809 scope.go:117] "RemoveContainer" containerID="6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.687021 4809 scope.go:117] "RemoveContainer" containerID="7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438" Sep 30 01:53:11 crc kubenswrapper[4809]: E0930 01:53:11.687701 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438\": container with ID starting with 7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438 not found: ID does not exist" containerID="7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.687763 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438"} err="failed to get container status \"7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438\": rpc error: code = NotFound desc = could not find container \"7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438\": container with ID starting with 7952cd972e64a61bfdbcdb0270925eb4ff6ba1fe9c9f79bc2d9b8fe34d883438 not found: ID does not exist" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.688496 4809 scope.go:117] "RemoveContainer" containerID="c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb" Sep 30 01:53:11 crc kubenswrapper[4809]: E0930 01:53:11.689024 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb\": container with ID starting with c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb not found: ID does not exist" containerID="c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.689061 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb"} err="failed to get container status \"c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb\": rpc error: code = NotFound desc = could not find container \"c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb\": container with ID starting with c0d971be329f01b9a2ac95c0d0c7e90acd82dc7c03528079b1550db38efb6fbb not found: ID does not exist" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.689096 4809 scope.go:117] "RemoveContainer" containerID="6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372" Sep 30 01:53:11 crc kubenswrapper[4809]: E0930 01:53:11.689478 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372\": container with ID starting with 6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372 not found: ID does not exist" containerID="6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.689503 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372"} err="failed to get container status \"6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372\": rpc error: code = NotFound desc = could not find container \"6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372\": container with ID starting with 6dc71a743ea8468cec4d9bdb5a2103969556f0aefb34036b93705a89abcb8372 not found: ID does not exist" Sep 30 01:53:11 crc kubenswrapper[4809]: I0930 01:53:11.707377 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" path="/var/lib/kubelet/pods/b4527013-ea3c-42f3-8150-68aa8532d049/volumes" Sep 30 01:54:25 crc kubenswrapper[4809]: I0930 01:54:25.325379 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:54:25 crc kubenswrapper[4809]: I0930 01:54:25.326085 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:54:55 crc kubenswrapper[4809]: I0930 01:54:55.325625 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:54:55 crc kubenswrapper[4809]: I0930 01:54:55.326390 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:55:25 crc kubenswrapper[4809]: I0930 01:55:25.325006 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:55:25 crc kubenswrapper[4809]: I0930 01:55:25.325667 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:55:25 crc kubenswrapper[4809]: I0930 01:55:25.325729 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 01:55:25 crc kubenswrapper[4809]: I0930 01:55:25.326773 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:55:25 crc kubenswrapper[4809]: I0930 01:55:25.326844 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" gracePeriod=600 Sep 30 01:55:25 crc kubenswrapper[4809]: E0930 01:55:25.477937 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:55:26 crc kubenswrapper[4809]: I0930 01:55:26.190885 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" exitCode=0 Sep 30 01:55:26 crc kubenswrapper[4809]: I0930 01:55:26.190990 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652"} Sep 30 01:55:26 crc kubenswrapper[4809]: I0930 01:55:26.191312 4809 scope.go:117] "RemoveContainer" containerID="bced84f5dab0f31b68f51c4794d1a1924b4390fb6ef61c6ae88443c3c03aeee8" Sep 30 01:55:26 crc kubenswrapper[4809]: I0930 01:55:26.192405 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:55:26 crc kubenswrapper[4809]: E0930 01:55:26.192761 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:55:36 crc kubenswrapper[4809]: I0930 01:55:36.691405 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:55:36 crc kubenswrapper[4809]: E0930 01:55:36.692179 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:55:50 crc 
kubenswrapper[4809]: I0930 01:55:50.691466 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:55:50 crc kubenswrapper[4809]: E0930 01:55:50.692365 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:56:03 crc kubenswrapper[4809]: I0930 01:56:03.692166 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:56:03 crc kubenswrapper[4809]: E0930 01:56:03.692865 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:56:17 crc kubenswrapper[4809]: I0930 01:56:17.693575 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:56:17 crc kubenswrapper[4809]: E0930 01:56:17.695485 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:56:32 crc kubenswrapper[4809]: I0930 01:56:32.691242 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:56:32 crc kubenswrapper[4809]: E0930 01:56:32.692284 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:56:45 crc kubenswrapper[4809]: I0930 01:56:45.690961 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:56:45 crc kubenswrapper[4809]: E0930 01:56:45.691679 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:56:57 crc kubenswrapper[4809]: I0930 01:56:57.692692 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:56:57 crc 
kubenswrapper[4809]: E0930 01:56:57.695623 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:57:09 crc kubenswrapper[4809]: I0930 01:57:09.711254 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:57:09 crc kubenswrapper[4809]: E0930 01:57:09.712502 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:57:21 crc kubenswrapper[4809]: I0930 01:57:21.699594 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:57:21 crc kubenswrapper[4809]: E0930 01:57:21.706149 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:57:22 crc kubenswrapper[4809]: E0930 01:57:22.563503 4809 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.24:55718->38.129.56.24:38341: write tcp 38.129.56.24:55718->38.129.56.24:38341: write: broken pipe Sep 30 01:57:33 crc kubenswrapper[4809]: I0930 01:57:33.691706 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:57:33 crc kubenswrapper[4809]: E0930 01:57:33.692543 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:57:48 crc kubenswrapper[4809]: I0930 01:57:48.691379 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:57:48 crc kubenswrapper[4809]: E0930 01:57:48.693001 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:57:59 crc kubenswrapper[4809]: I0930 01:57:59.702204 4809 scope.go:117] "RemoveContainer" 
containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:57:59 crc kubenswrapper[4809]: E0930 01:57:59.703324 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:58:14 crc kubenswrapper[4809]: I0930 01:58:14.691296 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:58:14 crc kubenswrapper[4809]: E0930 01:58:14.691950 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:58:28 crc kubenswrapper[4809]: I0930 01:58:28.690584 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:58:28 crc kubenswrapper[4809]: E0930 01:58:28.691382 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:58:43 crc kubenswrapper[4809]: I0930 01:58:43.691755 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:58:43 crc kubenswrapper[4809]: E0930 01:58:43.693037 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:58:57 crc kubenswrapper[4809]: I0930 01:58:57.691484 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:58:57 crc kubenswrapper[4809]: E0930 01:58:57.692588 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:59:09 crc kubenswrapper[4809]: I0930 01:59:09.703135 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:59:09 crc kubenswrapper[4809]: E0930 01:59:09.704002 4809 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:59:23 crc kubenswrapper[4809]: I0930 01:59:23.691249 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:59:23 crc kubenswrapper[4809]: E0930 01:59:23.692261 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:59:34 crc kubenswrapper[4809]: I0930 01:59:34.691583 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:59:34 crc kubenswrapper[4809]: E0930 01:59:34.692449 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 01:59:48 crc kubenswrapper[4809]: I0930 01:59:48.691000 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 01:59:48 crc kubenswrapper[4809]: E0930 01:59:48.692228 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.232208 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q"] Sep 30 02:00:00 crc kubenswrapper[4809]: E0930 02:00:00.233443 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="extract-content" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.233464 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="extract-content" Sep 30 02:00:00 crc kubenswrapper[4809]: E0930 02:00:00.233518 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="extract-utilities" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.233531 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="extract-utilities" Sep 30 02:00:00 crc kubenswrapper[4809]: E0930 02:00:00.233554 4809 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="registry-server" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.233565 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="registry-server" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.234243 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4527013-ea3c-42f3-8150-68aa8532d049" containerName="registry-server" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.235746 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.238944 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.241883 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.252368 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q"] Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.397553 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ce6f380-e28b-4698-8466-b1ba874c2968-config-volume\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.397651 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ce6f380-e28b-4698-8466-b1ba874c2968-secret-volume\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.397764 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5s87\" (UniqueName: \"kubernetes.io/projected/3ce6f380-e28b-4698-8466-b1ba874c2968-kube-api-access-k5s87\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.499916 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5s87\" (UniqueName: \"kubernetes.io/projected/3ce6f380-e28b-4698-8466-b1ba874c2968-kube-api-access-k5s87\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.500201 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ce6f380-e28b-4698-8466-b1ba874c2968-config-volume\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.500235 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ce6f380-e28b-4698-8466-b1ba874c2968-secret-volume\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.501700 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ce6f380-e28b-4698-8466-b1ba874c2968-config-volume\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.520329 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ce6f380-e28b-4698-8466-b1ba874c2968-secret-volume\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.527124 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5s87\" (UniqueName: \"kubernetes.io/projected/3ce6f380-e28b-4698-8466-b1ba874c2968-kube-api-access-k5s87\") pod \"collect-profiles-29319960-w795q\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.566514 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:00 crc kubenswrapper[4809]: I0930 02:00:00.694097 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 02:00:00 crc kubenswrapper[4809]: E0930 02:00:00.694661 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:00:01 crc kubenswrapper[4809]: I0930 02:00:01.093339 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q"] Sep 30 02:00:01 crc kubenswrapper[4809]: I0930 02:00:01.448081 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" event={"ID":"3ce6f380-e28b-4698-8466-b1ba874c2968","Type":"ContainerStarted","Data":"a6732ff5d800ccdf98839db5325a5626b2f4149b53f3ee2de91ac945bedd5182"} Sep 30 02:00:01 crc kubenswrapper[4809]: I0930 02:00:01.448503 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" event={"ID":"3ce6f380-e28b-4698-8466-b1ba874c2968","Type":"ContainerStarted","Data":"fd05c1ff70d65e57af7d278d5c4996037557e24a424b25053e09adbc1098ef78"} Sep 30 02:00:01 crc kubenswrapper[4809]: I0930 02:00:01.469129 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" podStartSLOduration=1.469112033 podStartE2EDuration="1.469112033s" podCreationTimestamp="2025-09-30 02:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 02:00:01.466253845 +0000 UTC m=+6652.502503303" watchObservedRunningTime="2025-09-30 02:00:01.469112033 +0000 UTC m=+6652.505361441" Sep 30 02:00:02 crc kubenswrapper[4809]: I0930 02:00:02.461101 4809 generic.go:334] "Generic (PLEG): container finished" podID="3ce6f380-e28b-4698-8466-b1ba874c2968" containerID="a6732ff5d800ccdf98839db5325a5626b2f4149b53f3ee2de91ac945bedd5182" exitCode=0 Sep 30 02:00:02 crc kubenswrapper[4809]: I0930 02:00:02.461131 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" event={"ID":"3ce6f380-e28b-4698-8466-b1ba874c2968","Type":"ContainerDied","Data":"a6732ff5d800ccdf98839db5325a5626b2f4149b53f3ee2de91ac945bedd5182"} Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.870825 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.987678 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ce6f380-e28b-4698-8466-b1ba874c2968-config-volume\") pod \"3ce6f380-e28b-4698-8466-b1ba874c2968\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.987865 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ce6f380-e28b-4698-8466-b1ba874c2968-secret-volume\") pod \"3ce6f380-e28b-4698-8466-b1ba874c2968\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.987950 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5s87\" (UniqueName: \"kubernetes.io/projected/3ce6f380-e28b-4698-8466-b1ba874c2968-kube-api-access-k5s87\") pod \"3ce6f380-e28b-4698-8466-b1ba874c2968\" (UID: \"3ce6f380-e28b-4698-8466-b1ba874c2968\") " Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.988326 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ce6f380-e28b-4698-8466-b1ba874c2968-config-volume" (OuterVolumeSpecName: "config-volume") pod "3ce6f380-e28b-4698-8466-b1ba874c2968" (UID: "3ce6f380-e28b-4698-8466-b1ba874c2968"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.989674 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ce6f380-e28b-4698-8466-b1ba874c2968-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.995114 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce6f380-e28b-4698-8466-b1ba874c2968-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3ce6f380-e28b-4698-8466-b1ba874c2968" (UID: "3ce6f380-e28b-4698-8466-b1ba874c2968"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:00:03 crc kubenswrapper[4809]: I0930 02:00:03.995162 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ce6f380-e28b-4698-8466-b1ba874c2968-kube-api-access-k5s87" (OuterVolumeSpecName: "kube-api-access-k5s87") pod "3ce6f380-e28b-4698-8466-b1ba874c2968" (UID: "3ce6f380-e28b-4698-8466-b1ba874c2968"). InnerVolumeSpecName "kube-api-access-k5s87". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.091113 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ce6f380-e28b-4698-8466-b1ba874c2968-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.091146 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5s87\" (UniqueName: \"kubernetes.io/projected/3ce6f380-e28b-4698-8466-b1ba874c2968-kube-api-access-k5s87\") on node \"crc\" DevicePath \"\"" Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.484464 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" event={"ID":"3ce6f380-e28b-4698-8466-b1ba874c2968","Type":"ContainerDied","Data":"fd05c1ff70d65e57af7d278d5c4996037557e24a424b25053e09adbc1098ef78"} Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.484854 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd05c1ff70d65e57af7d278d5c4996037557e24a424b25053e09adbc1098ef78" Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.484507 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q" Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.559938 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr"] Sep 30 02:00:04 crc kubenswrapper[4809]: I0930 02:00:04.571815 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-8gcbr"] Sep 30 02:00:05 crc kubenswrapper[4809]: I0930 02:00:05.712419 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66aef030-5824-4b79-a039-650509c79935" path="/var/lib/kubelet/pods/66aef030-5824-4b79-a039-650509c79935/volumes" Sep 30 02:00:11 crc kubenswrapper[4809]: I0930 02:00:11.694189 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 02:00:11 crc kubenswrapper[4809]: E0930 02:00:11.695959 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:00:19 crc kubenswrapper[4809]: I0930 02:00:19.970200 4809 scope.go:117] "RemoveContainer" containerID="e561ed83d2af3807088663cb7ecfb19b881982c95729b822b730bc27adfc2268" Sep 30 02:00:22 crc kubenswrapper[4809]: I0930 02:00:22.690827 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 02:00:22 crc kubenswrapper[4809]: 
E0930 02:00:22.691864 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:00:37 crc kubenswrapper[4809]: I0930 02:00:37.691177 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 02:00:38 crc kubenswrapper[4809]: I0930 02:00:38.905402 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"458b55bd463e7782a8d22a7687a71bd620504535dd2a4c4447bbfdda853ab8da"} Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.312867 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vk5gz"] Sep 30 02:00:48 crc kubenswrapper[4809]: E0930 02:00:48.314318 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ce6f380-e28b-4698-8466-b1ba874c2968" containerName="collect-profiles" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.314336 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ce6f380-e28b-4698-8466-b1ba874c2968" containerName="collect-profiles" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.314622 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ce6f380-e28b-4698-8466-b1ba874c2968" containerName="collect-profiles" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.316677 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.328687 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vk5gz"] Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.389368 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-catalog-content\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.389428 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwqpz\" (UniqueName: \"kubernetes.io/projected/84d5d561-6198-4254-9c20-b0873920b063-kube-api-access-bwqpz\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.389925 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-utilities\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.493153 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-utilities\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.493289 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-catalog-content\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.493330 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwqpz\" (UniqueName: \"kubernetes.io/projected/84d5d561-6198-4254-9c20-b0873920b063-kube-api-access-bwqpz\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.494023 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-utilities\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.494138 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-catalog-content\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.516870 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bwqpz\" (UniqueName: \"kubernetes.io/projected/84d5d561-6198-4254-9c20-b0873920b063-kube-api-access-bwqpz\") pod \"redhat-operators-vk5gz\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:48 crc kubenswrapper[4809]: I0930 02:00:48.639431 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:49 crc kubenswrapper[4809]: I0930 02:00:49.196068 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vk5gz"] Sep 30 02:00:50 crc kubenswrapper[4809]: I0930 02:00:50.024318 4809 generic.go:334] "Generic (PLEG): container finished" podID="84d5d561-6198-4254-9c20-b0873920b063" containerID="e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f" exitCode=0 Sep 30 02:00:50 crc kubenswrapper[4809]: I0930 02:00:50.024632 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerDied","Data":"e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f"} Sep 30 02:00:50 crc kubenswrapper[4809]: I0930 02:00:50.024678 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerStarted","Data":"75179ef0a35815075d528325d752d91a1333a826b8f9d33219d93f7fd29874f8"} Sep 30 02:00:50 crc kubenswrapper[4809]: I0930 02:00:50.026599 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 02:00:52 crc kubenswrapper[4809]: I0930 02:00:52.053347 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerStarted","Data":"022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90"} Sep 30 02:00:55 crc kubenswrapper[4809]: I0930 02:00:55.090223 4809 generic.go:334] "Generic (PLEG): container finished" podID="84d5d561-6198-4254-9c20-b0873920b063" containerID="022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90" exitCode=0 Sep 30 02:00:55 crc kubenswrapper[4809]: I0930 02:00:55.090675 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerDied","Data":"022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90"} Sep 30 02:00:56 crc kubenswrapper[4809]: I0930 02:00:56.107500 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerStarted","Data":"2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd"} Sep 30 02:00:56 crc kubenswrapper[4809]: I0930 02:00:56.134547 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vk5gz" podStartSLOduration=2.473999955 podStartE2EDuration="8.134527497s" podCreationTimestamp="2025-09-30 02:00:48 +0000 UTC" firstStartedPulling="2025-09-30 02:00:50.026292945 +0000 UTC m=+6701.062542363" lastFinishedPulling="2025-09-30 02:00:55.686820487 +0000 UTC m=+6706.723069905" observedRunningTime="2025-09-30 02:00:56.127366091 +0000 UTC m=+6707.163615499" watchObservedRunningTime="2025-09-30 02:00:56.134527497 +0000 UTC m=+6707.170776905" Sep 30 02:00:58 crc 
kubenswrapper[4809]: I0930 02:00:58.640125 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:58 crc kubenswrapper[4809]: I0930 02:00:58.641026 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:00:59 crc kubenswrapper[4809]: I0930 02:00:59.709460 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vk5gz" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="registry-server" probeResult="failure" output=< Sep 30 02:00:59 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:00:59 crc kubenswrapper[4809]: > Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.157216 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319961-shptp"] Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.159417 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.168157 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319961-shptp"] Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.208848 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-combined-ca-bundle\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.209274 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-fernet-keys\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.209415 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmmzd\" (UniqueName: \"kubernetes.io/projected/816eef70-b240-48ed-93d3-813d38b2fe12-kube-api-access-gmmzd\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.209553 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-config-data\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.312602 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-fernet-keys\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.312696 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmmzd\" (UniqueName: 
\"kubernetes.io/projected/816eef70-b240-48ed-93d3-813d38b2fe12-kube-api-access-gmmzd\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.312738 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-config-data\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.312869 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-combined-ca-bundle\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.319283 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-fernet-keys\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.319405 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-combined-ca-bundle\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.320021 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-config-data\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.337824 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmmzd\" (UniqueName: \"kubernetes.io/projected/816eef70-b240-48ed-93d3-813d38b2fe12-kube-api-access-gmmzd\") pod \"keystone-cron-29319961-shptp\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:00 crc kubenswrapper[4809]: I0930 02:01:00.488818 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:01 crc kubenswrapper[4809]: I0930 02:01:01.030972 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319961-shptp"] Sep 30 02:01:01 crc kubenswrapper[4809]: I0930 02:01:01.159349 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319961-shptp" event={"ID":"816eef70-b240-48ed-93d3-813d38b2fe12","Type":"ContainerStarted","Data":"8beff6ba2f20b5ac174d849fcb35edbc71b15e5f877d201caa91966ed3682210"} Sep 30 02:01:02 crc kubenswrapper[4809]: I0930 02:01:02.173201 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319961-shptp" event={"ID":"816eef70-b240-48ed-93d3-813d38b2fe12","Type":"ContainerStarted","Data":"0635f3c8a0d1a1add16bb5d90f6e0f0b96e6bcc0e4b3cd6e8034019c34822170"} Sep 30 02:01:02 crc kubenswrapper[4809]: I0930 02:01:02.221779 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319961-shptp" podStartSLOduration=2.221754965 podStartE2EDuration="2.221754965s" podCreationTimestamp="2025-09-30 02:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 02:01:02.19810995 +0000 UTC m=+6713.234359368" watchObservedRunningTime="2025-09-30 02:01:02.221754965 +0000 UTC m=+6713.258004383" Sep 30 02:01:05 crc kubenswrapper[4809]: I0930 02:01:05.206269 4809 generic.go:334] "Generic (PLEG): container finished" podID="816eef70-b240-48ed-93d3-813d38b2fe12" containerID="0635f3c8a0d1a1add16bb5d90f6e0f0b96e6bcc0e4b3cd6e8034019c34822170" exitCode=0 Sep 30 02:01:05 crc kubenswrapper[4809]: I0930 02:01:05.206356 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319961-shptp" event={"ID":"816eef70-b240-48ed-93d3-813d38b2fe12","Type":"ContainerDied","Data":"0635f3c8a0d1a1add16bb5d90f6e0f0b96e6bcc0e4b3cd6e8034019c34822170"} Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.728283 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.769259 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmmzd\" (UniqueName: \"kubernetes.io/projected/816eef70-b240-48ed-93d3-813d38b2fe12-kube-api-access-gmmzd\") pod \"816eef70-b240-48ed-93d3-813d38b2fe12\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.769456 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-config-data\") pod \"816eef70-b240-48ed-93d3-813d38b2fe12\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.769547 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-combined-ca-bundle\") pod \"816eef70-b240-48ed-93d3-813d38b2fe12\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.769887 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-fernet-keys\") pod \"816eef70-b240-48ed-93d3-813d38b2fe12\" (UID: \"816eef70-b240-48ed-93d3-813d38b2fe12\") " Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.785957 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "816eef70-b240-48ed-93d3-813d38b2fe12" (UID: "816eef70-b240-48ed-93d3-813d38b2fe12"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.785999 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/816eef70-b240-48ed-93d3-813d38b2fe12-kube-api-access-gmmzd" (OuterVolumeSpecName: "kube-api-access-gmmzd") pod "816eef70-b240-48ed-93d3-813d38b2fe12" (UID: "816eef70-b240-48ed-93d3-813d38b2fe12"). InnerVolumeSpecName "kube-api-access-gmmzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.812854 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "816eef70-b240-48ed-93d3-813d38b2fe12" (UID: "816eef70-b240-48ed-93d3-813d38b2fe12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.843918 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-config-data" (OuterVolumeSpecName: "config-data") pod "816eef70-b240-48ed-93d3-813d38b2fe12" (UID: "816eef70-b240-48ed-93d3-813d38b2fe12"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.873334 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.873384 4809 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.873399 4809 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/816eef70-b240-48ed-93d3-813d38b2fe12-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:06 crc kubenswrapper[4809]: I0930 02:01:06.873410 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmmzd\" (UniqueName: \"kubernetes.io/projected/816eef70-b240-48ed-93d3-813d38b2fe12-kube-api-access-gmmzd\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:07 crc kubenswrapper[4809]: I0930 02:01:07.236095 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319961-shptp" event={"ID":"816eef70-b240-48ed-93d3-813d38b2fe12","Type":"ContainerDied","Data":"8beff6ba2f20b5ac174d849fcb35edbc71b15e5f877d201caa91966ed3682210"} Sep 30 02:01:07 crc kubenswrapper[4809]: I0930 02:01:07.236376 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8beff6ba2f20b5ac174d849fcb35edbc71b15e5f877d201caa91966ed3682210" Sep 30 02:01:07 crc kubenswrapper[4809]: I0930 02:01:07.236246 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319961-shptp" Sep 30 02:01:08 crc kubenswrapper[4809]: I0930 02:01:08.712462 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:01:08 crc kubenswrapper[4809]: I0930 02:01:08.776255 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:01:08 crc kubenswrapper[4809]: I0930 02:01:08.954568 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vk5gz"] Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.283916 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vk5gz" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="registry-server" containerID="cri-o://2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd" gracePeriod=2 Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.839468 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.966531 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-catalog-content\") pod \"84d5d561-6198-4254-9c20-b0873920b063\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.966713 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwqpz\" (UniqueName: \"kubernetes.io/projected/84d5d561-6198-4254-9c20-b0873920b063-kube-api-access-bwqpz\") pod \"84d5d561-6198-4254-9c20-b0873920b063\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.967709 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-utilities\") pod \"84d5d561-6198-4254-9c20-b0873920b063\" (UID: \"84d5d561-6198-4254-9c20-b0873920b063\") " Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.968445 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-utilities" (OuterVolumeSpecName: "utilities") pod "84d5d561-6198-4254-9c20-b0873920b063" (UID: "84d5d561-6198-4254-9c20-b0873920b063"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:01:10 crc kubenswrapper[4809]: I0930 02:01:10.972012 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84d5d561-6198-4254-9c20-b0873920b063-kube-api-access-bwqpz" (OuterVolumeSpecName: "kube-api-access-bwqpz") pod "84d5d561-6198-4254-9c20-b0873920b063" (UID: "84d5d561-6198-4254-9c20-b0873920b063"). InnerVolumeSpecName "kube-api-access-bwqpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.044942 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84d5d561-6198-4254-9c20-b0873920b063" (UID: "84d5d561-6198-4254-9c20-b0873920b063"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.069986 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.070029 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d5d561-6198-4254-9c20-b0873920b063-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.070045 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwqpz\" (UniqueName: \"kubernetes.io/projected/84d5d561-6198-4254-9c20-b0873920b063-kube-api-access-bwqpz\") on node \"crc\" DevicePath \"\"" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.312917 4809 generic.go:334] "Generic (PLEG): container finished" podID="84d5d561-6198-4254-9c20-b0873920b063" containerID="2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd" exitCode=0 Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.312971 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerDied","Data":"2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd"} Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.313000 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vk5gz" event={"ID":"84d5d561-6198-4254-9c20-b0873920b063","Type":"ContainerDied","Data":"75179ef0a35815075d528325d752d91a1333a826b8f9d33219d93f7fd29874f8"} Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.313016 4809 scope.go:117] "RemoveContainer" containerID="2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.313138 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vk5gz" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.355396 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vk5gz"] Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.356760 4809 scope.go:117] "RemoveContainer" containerID="022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.369082 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vk5gz"] Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.389107 4809 scope.go:117] "RemoveContainer" containerID="e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.448370 4809 scope.go:117] "RemoveContainer" containerID="2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd" Sep 30 02:01:11 crc kubenswrapper[4809]: E0930 02:01:11.448805 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd\": container with ID starting with 2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd not found: ID does not exist" containerID="2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.448843 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd"} err="failed to get container status \"2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd\": rpc error: code = NotFound desc = could not find container \"2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd\": container with ID starting with 2dc920b7c6ab703050fb8131d038ca77069a067ca060c1cf88b8bfb70bc7aebd not found: ID does not exist" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.448869 4809 scope.go:117] "RemoveContainer" containerID="022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90" Sep 30 02:01:11 crc kubenswrapper[4809]: E0930 02:01:11.449131 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90\": container with ID starting with 022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90 not found: ID does not exist" containerID="022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.449164 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90"} err="failed to get container status \"022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90\": rpc error: code = NotFound desc = could not find container \"022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90\": container with ID starting with 022136811f813bd592f5d1dc7b9f7a0b8b48dc53f231cf304885b24728dc2d90 not found: ID does not exist" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.449184 4809 scope.go:117] "RemoveContainer" containerID="e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f" Sep 30 02:01:11 crc kubenswrapper[4809]: E0930 02:01:11.449383 4809 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f\": container with ID starting with e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f not found: ID does not exist" containerID="e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.449406 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f"} err="failed to get container status \"e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f\": rpc error: code = NotFound desc = could not find container \"e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f\": container with ID starting with e1d421767d84c28e3c40b506fb825152551cc0e5d5abd47ed26078fc0673229f not found: ID does not exist" Sep 30 02:01:11 crc kubenswrapper[4809]: I0930 02:01:11.704587 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84d5d561-6198-4254-9c20-b0873920b063" path="/var/lib/kubelet/pods/84d5d561-6198-4254-9c20-b0873920b063/volumes" Sep 30 02:02:55 crc kubenswrapper[4809]: I0930 02:02:55.324784 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:02:55 crc kubenswrapper[4809]: I0930 02:02:55.325453 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.357792 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4bxnk"] Sep 30 02:02:59 crc kubenswrapper[4809]: E0930 02:02:59.359318 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816eef70-b240-48ed-93d3-813d38b2fe12" containerName="keystone-cron" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.359340 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="816eef70-b240-48ed-93d3-813d38b2fe12" containerName="keystone-cron" Sep 30 02:02:59 crc kubenswrapper[4809]: E0930 02:02:59.359384 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="registry-server" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.359397 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="registry-server" Sep 30 02:02:59 crc kubenswrapper[4809]: E0930 02:02:59.359436 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="extract-content" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.359448 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="extract-content" Sep 30 02:02:59 crc kubenswrapper[4809]: E0930 02:02:59.359470 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="extract-utilities" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 
02:02:59.359482 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="extract-utilities" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.359957 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="816eef70-b240-48ed-93d3-813d38b2fe12" containerName="keystone-cron" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.360012 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d5d561-6198-4254-9c20-b0873920b063" containerName="registry-server" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.363155 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.398271 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bxnk"] Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.454684 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-catalog-content\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.454772 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-utilities\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.455201 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfxlh\" (UniqueName: \"kubernetes.io/projected/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-kube-api-access-mfxlh\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.557745 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-utilities\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.557888 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfxlh\" (UniqueName: \"kubernetes.io/projected/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-kube-api-access-mfxlh\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.558006 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-catalog-content\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.558385 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-utilities\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.558683 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-catalog-content\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.587025 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfxlh\" (UniqueName: \"kubernetes.io/projected/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-kube-api-access-mfxlh\") pod \"certified-operators-4bxnk\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:02:59 crc kubenswrapper[4809]: I0930 02:02:59.700943 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.287282 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bxnk"] Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.656115 4809 generic.go:334] "Generic (PLEG): container finished" podID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerID="b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0" exitCode=0 Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.656627 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerDied","Data":"b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0"} Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.656689 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerStarted","Data":"ab3a25c5951ec121ab119baee5e69a88c22d1793faac607f70b32b2f74267247"} Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.760209 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dlls4"] Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.762523 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.793369 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dlls4"] Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.891930 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whhg9\" (UniqueName: \"kubernetes.io/projected/303628c9-5695-47c1-894f-5daeea6f6ef3-kube-api-access-whhg9\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.892300 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-utilities\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.892521 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-catalog-content\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.994323 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-utilities\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.994477 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-catalog-content\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.994554 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whhg9\" (UniqueName: \"kubernetes.io/projected/303628c9-5695-47c1-894f-5daeea6f6ef3-kube-api-access-whhg9\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.995053 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-utilities\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:00 crc kubenswrapper[4809]: I0930 02:03:00.995254 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-catalog-content\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.017657 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-whhg9\" (UniqueName: \"kubernetes.io/projected/303628c9-5695-47c1-894f-5daeea6f6ef3-kube-api-access-whhg9\") pod \"community-operators-dlls4\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.103469 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.729508 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dlls4"] Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.748534 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kdb7w"] Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.751098 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:01 crc kubenswrapper[4809]: W0930 02:03:01.754118 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod303628c9_5695_47c1_894f_5daeea6f6ef3.slice/crio-8cf6e3eb61b02479392584e16bae3bfb93aba22de4727ffc18e73e878d2303b4 WatchSource:0}: Error finding container 8cf6e3eb61b02479392584e16bae3bfb93aba22de4727ffc18e73e878d2303b4: Status 404 returned error can't find the container with id 8cf6e3eb61b02479392584e16bae3bfb93aba22de4727ffc18e73e878d2303b4 Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.767074 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdb7w"] Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.916976 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-catalog-content\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.917447 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-utilities\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:01 crc kubenswrapper[4809]: I0930 02:03:01.917784 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75kj2\" (UniqueName: \"kubernetes.io/projected/ab514495-c155-45b9-a2cf-6dd73ba16f85-kube-api-access-75kj2\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.022324 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75kj2\" (UniqueName: \"kubernetes.io/projected/ab514495-c155-45b9-a2cf-6dd73ba16f85-kube-api-access-75kj2\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.022402 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-catalog-content\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.022444 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-utilities\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.022980 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-utilities\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.023254 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-catalog-content\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.047657 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75kj2\" (UniqueName: \"kubernetes.io/projected/ab514495-c155-45b9-a2cf-6dd73ba16f85-kube-api-access-75kj2\") pod \"redhat-marketplace-kdb7w\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.104204 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.626670 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdb7w"] Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.681818 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerStarted","Data":"69bacfcac132ada5ee34ab98b530349c496bbbae4830ff8fc08107802d6906a1"} Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.684289 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerStarted","Data":"edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb"} Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.689035 4809 generic.go:334] "Generic (PLEG): container finished" podID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerID="eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07" exitCode=0 Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.689085 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerDied","Data":"eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07"} Sep 30 02:03:02 crc kubenswrapper[4809]: I0930 02:03:02.689117 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerStarted","Data":"8cf6e3eb61b02479392584e16bae3bfb93aba22de4727ffc18e73e878d2303b4"} Sep 30 02:03:03 crc kubenswrapper[4809]: I0930 02:03:03.707822 4809 generic.go:334] "Generic (PLEG): container finished" podID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerID="1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b" exitCode=0 Sep 30 02:03:03 crc kubenswrapper[4809]: I0930 02:03:03.707984 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerDied","Data":"1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b"} Sep 30 02:03:03 crc kubenswrapper[4809]: I0930 02:03:03.711397 4809 generic.go:334] "Generic (PLEG): container finished" podID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerID="edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb" exitCode=0 Sep 30 02:03:03 crc kubenswrapper[4809]: I0930 02:03:03.711443 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerDied","Data":"edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb"} Sep 30 02:03:04 crc kubenswrapper[4809]: I0930 02:03:04.735799 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerStarted","Data":"b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e"} Sep 30 02:03:04 crc kubenswrapper[4809]: I0930 02:03:04.738018 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" 
event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerStarted","Data":"021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09"} Sep 30 02:03:04 crc kubenswrapper[4809]: I0930 02:03:04.781724 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4bxnk" podStartSLOduration=2.160506828 podStartE2EDuration="5.781703884s" podCreationTimestamp="2025-09-30 02:02:59 +0000 UTC" firstStartedPulling="2025-09-30 02:03:00.662342822 +0000 UTC m=+6831.698592230" lastFinishedPulling="2025-09-30 02:03:04.283539848 +0000 UTC m=+6835.319789286" observedRunningTime="2025-09-30 02:03:04.779690978 +0000 UTC m=+6835.815940396" watchObservedRunningTime="2025-09-30 02:03:04.781703884 +0000 UTC m=+6835.817953292" Sep 30 02:03:05 crc kubenswrapper[4809]: E0930 02:03:05.741403 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod303628c9_5695_47c1_894f_5daeea6f6ef3.slice/crio-conmon-b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e.scope\": RecentStats: unable to find data in memory cache]" Sep 30 02:03:05 crc kubenswrapper[4809]: I0930 02:03:05.749631 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerStarted","Data":"c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c"} Sep 30 02:03:05 crc kubenswrapper[4809]: I0930 02:03:05.752883 4809 generic.go:334] "Generic (PLEG): container finished" podID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerID="b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e" exitCode=0 Sep 30 02:03:05 crc kubenswrapper[4809]: I0930 02:03:05.752976 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerDied","Data":"b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e"} Sep 30 02:03:06 crc kubenswrapper[4809]: I0930 02:03:06.770127 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerStarted","Data":"69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b"} Sep 30 02:03:06 crc kubenswrapper[4809]: I0930 02:03:06.775035 4809 generic.go:334] "Generic (PLEG): container finished" podID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerID="c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c" exitCode=0 Sep 30 02:03:06 crc kubenswrapper[4809]: I0930 02:03:06.775395 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerDied","Data":"c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c"} Sep 30 02:03:06 crc kubenswrapper[4809]: I0930 02:03:06.799725 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dlls4" podStartSLOduration=3.317403439 podStartE2EDuration="6.799707817s" podCreationTimestamp="2025-09-30 02:03:00 +0000 UTC" firstStartedPulling="2025-09-30 02:03:02.690791211 +0000 UTC m=+6833.727040629" lastFinishedPulling="2025-09-30 02:03:06.173095599 +0000 UTC m=+6837.209345007" observedRunningTime="2025-09-30 02:03:06.797067335 +0000 UTC 
m=+6837.833316743" watchObservedRunningTime="2025-09-30 02:03:06.799707817 +0000 UTC m=+6837.835957225" Sep 30 02:03:07 crc kubenswrapper[4809]: I0930 02:03:07.789462 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerStarted","Data":"8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c"} Sep 30 02:03:07 crc kubenswrapper[4809]: I0930 02:03:07.825360 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kdb7w" podStartSLOduration=3.358426992 podStartE2EDuration="6.825342298s" podCreationTimestamp="2025-09-30 02:03:01 +0000 UTC" firstStartedPulling="2025-09-30 02:03:03.718416277 +0000 UTC m=+6834.754665685" lastFinishedPulling="2025-09-30 02:03:07.185331583 +0000 UTC m=+6838.221580991" observedRunningTime="2025-09-30 02:03:07.818882131 +0000 UTC m=+6838.855131549" watchObservedRunningTime="2025-09-30 02:03:07.825342298 +0000 UTC m=+6838.861591706" Sep 30 02:03:09 crc kubenswrapper[4809]: I0930 02:03:09.712372 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:09 crc kubenswrapper[4809]: I0930 02:03:09.712687 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:10 crc kubenswrapper[4809]: I0930 02:03:10.772634 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-4bxnk" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="registry-server" probeResult="failure" output=< Sep 30 02:03:10 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:03:10 crc kubenswrapper[4809]: > Sep 30 02:03:11 crc kubenswrapper[4809]: I0930 02:03:11.105409 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:11 crc kubenswrapper[4809]: I0930 02:03:11.105460 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:11 crc kubenswrapper[4809]: I0930 02:03:11.164847 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:11 crc kubenswrapper[4809]: I0930 02:03:11.890281 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:12 crc kubenswrapper[4809]: I0930 02:03:12.104712 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:12 crc kubenswrapper[4809]: I0930 02:03:12.105101 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:12 crc kubenswrapper[4809]: I0930 02:03:12.201847 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:12 crc kubenswrapper[4809]: I0930 02:03:12.902175 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:13 crc kubenswrapper[4809]: I0930 02:03:13.543755 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dlls4"] Sep 
30 02:03:13 crc kubenswrapper[4809]: I0930 02:03:13.857277 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dlls4" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="registry-server" containerID="cri-o://69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b" gracePeriod=2 Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.460910 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.537813 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdb7w"] Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.570371 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-utilities\") pod \"303628c9-5695-47c1-894f-5daeea6f6ef3\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.570556 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whhg9\" (UniqueName: \"kubernetes.io/projected/303628c9-5695-47c1-894f-5daeea6f6ef3-kube-api-access-whhg9\") pod \"303628c9-5695-47c1-894f-5daeea6f6ef3\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.570756 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-catalog-content\") pod \"303628c9-5695-47c1-894f-5daeea6f6ef3\" (UID: \"303628c9-5695-47c1-894f-5daeea6f6ef3\") " Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.571545 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-utilities" (OuterVolumeSpecName: "utilities") pod "303628c9-5695-47c1-894f-5daeea6f6ef3" (UID: "303628c9-5695-47c1-894f-5daeea6f6ef3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.582805 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/303628c9-5695-47c1-894f-5daeea6f6ef3-kube-api-access-whhg9" (OuterVolumeSpecName: "kube-api-access-whhg9") pod "303628c9-5695-47c1-894f-5daeea6f6ef3" (UID: "303628c9-5695-47c1-894f-5daeea6f6ef3"). InnerVolumeSpecName "kube-api-access-whhg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.614878 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "303628c9-5695-47c1-894f-5daeea6f6ef3" (UID: "303628c9-5695-47c1-894f-5daeea6f6ef3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.674197 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.674484 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whhg9\" (UniqueName: \"kubernetes.io/projected/303628c9-5695-47c1-894f-5daeea6f6ef3-kube-api-access-whhg9\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.674618 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303628c9-5695-47c1-894f-5daeea6f6ef3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.869684 4809 generic.go:334] "Generic (PLEG): container finished" podID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerID="69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b" exitCode=0 Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.869775 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerDied","Data":"69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b"} Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.869784 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dlls4" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.869809 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dlls4" event={"ID":"303628c9-5695-47c1-894f-5daeea6f6ef3","Type":"ContainerDied","Data":"8cf6e3eb61b02479392584e16bae3bfb93aba22de4727ffc18e73e878d2303b4"} Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.869826 4809 scope.go:117] "RemoveContainer" containerID="69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.870163 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kdb7w" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="registry-server" containerID="cri-o://8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c" gracePeriod=2 Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.911940 4809 scope.go:117] "RemoveContainer" containerID="b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.971424 4809 scope.go:117] "RemoveContainer" containerID="eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07" Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.985208 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dlls4"] Sep 30 02:03:14 crc kubenswrapper[4809]: I0930 02:03:14.999946 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dlls4"] Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.111871 4809 scope.go:117] "RemoveContainer" containerID="69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b" Sep 30 02:03:15 crc kubenswrapper[4809]: E0930 02:03:15.112349 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b\": container with ID starting with 69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b not found: ID does not exist" containerID="69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.112412 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b"} err="failed to get container status \"69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b\": rpc error: code = NotFound desc = could not find container \"69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b\": container with ID starting with 69497c887dcbbbf7bdbe6e556cd7c54dc3ea28b255722ff5048d97f75258d46b not found: ID does not exist" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.112451 4809 scope.go:117] "RemoveContainer" containerID="b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e" Sep 30 02:03:15 crc kubenswrapper[4809]: E0930 02:03:15.112842 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e\": container with ID starting with b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e not found: ID does not exist" containerID="b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.112873 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e"} err="failed to get container status \"b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e\": rpc error: code = NotFound desc = could not find container \"b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e\": container with ID starting with b64028a6b3d7720bf6c38a72fdfc44df1ccf8826103596f05dcc00d46e16298e not found: ID does not exist" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.112892 4809 scope.go:117] "RemoveContainer" containerID="eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07" Sep 30 02:03:15 crc kubenswrapper[4809]: E0930 02:03:15.113169 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07\": container with ID starting with eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07 not found: ID does not exist" containerID="eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.113206 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07"} err="failed to get container status \"eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07\": rpc error: code = NotFound desc = could not find container \"eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07\": container with ID starting with eb95a26db9472b8eff00c08b56e2d866b6529350d5b59ffb79275fbd6040fb07 not found: ID does not exist" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.485178 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.597371 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75kj2\" (UniqueName: \"kubernetes.io/projected/ab514495-c155-45b9-a2cf-6dd73ba16f85-kube-api-access-75kj2\") pod \"ab514495-c155-45b9-a2cf-6dd73ba16f85\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.597740 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-utilities\") pod \"ab514495-c155-45b9-a2cf-6dd73ba16f85\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.597945 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-catalog-content\") pod \"ab514495-c155-45b9-a2cf-6dd73ba16f85\" (UID: \"ab514495-c155-45b9-a2cf-6dd73ba16f85\") " Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.598667 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-utilities" (OuterVolumeSpecName: "utilities") pod "ab514495-c155-45b9-a2cf-6dd73ba16f85" (UID: "ab514495-c155-45b9-a2cf-6dd73ba16f85"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.605031 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab514495-c155-45b9-a2cf-6dd73ba16f85-kube-api-access-75kj2" (OuterVolumeSpecName: "kube-api-access-75kj2") pod "ab514495-c155-45b9-a2cf-6dd73ba16f85" (UID: "ab514495-c155-45b9-a2cf-6dd73ba16f85"). InnerVolumeSpecName "kube-api-access-75kj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.628069 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab514495-c155-45b9-a2cf-6dd73ba16f85" (UID: "ab514495-c155-45b9-a2cf-6dd73ba16f85"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.701706 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" path="/var/lib/kubelet/pods/303628c9-5695-47c1-894f-5daeea6f6ef3/volumes" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.709980 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75kj2\" (UniqueName: \"kubernetes.io/projected/ab514495-c155-45b9-a2cf-6dd73ba16f85-kube-api-access-75kj2\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.710010 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.710021 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab514495-c155-45b9-a2cf-6dd73ba16f85-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.882976 4809 generic.go:334] "Generic (PLEG): container finished" podID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerID="8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c" exitCode=0 Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.883049 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdb7w" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.883920 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerDied","Data":"8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c"} Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.883966 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdb7w" event={"ID":"ab514495-c155-45b9-a2cf-6dd73ba16f85","Type":"ContainerDied","Data":"69bacfcac132ada5ee34ab98b530349c496bbbae4830ff8fc08107802d6906a1"} Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.883988 4809 scope.go:117] "RemoveContainer" containerID="8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.920779 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdb7w"] Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.921858 4809 scope.go:117] "RemoveContainer" containerID="c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.931293 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdb7w"] Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.943924 4809 scope.go:117] "RemoveContainer" containerID="1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.993097 4809 scope.go:117] "RemoveContainer" containerID="8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c" Sep 30 02:03:15 crc kubenswrapper[4809]: E0930 02:03:15.993716 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c\": container with ID 
starting with 8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c not found: ID does not exist" containerID="8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.993755 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c"} err="failed to get container status \"8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c\": rpc error: code = NotFound desc = could not find container \"8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c\": container with ID starting with 8ba8276dcb5be98f7c797e34b3c581ff98a20751857615734b305c88fb45cf3c not found: ID does not exist" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.993781 4809 scope.go:117] "RemoveContainer" containerID="c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c" Sep 30 02:03:15 crc kubenswrapper[4809]: E0930 02:03:15.994105 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c\": container with ID starting with c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c not found: ID does not exist" containerID="c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.994127 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c"} err="failed to get container status \"c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c\": rpc error: code = NotFound desc = could not find container \"c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c\": container with ID starting with c3d8f55333997d1324bfe2e904dd299f830d1df733ecc91f47bff19672e0460c not found: ID does not exist" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.994146 4809 scope.go:117] "RemoveContainer" containerID="1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b" Sep 30 02:03:15 crc kubenswrapper[4809]: E0930 02:03:15.994461 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b\": container with ID starting with 1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b not found: ID does not exist" containerID="1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b" Sep 30 02:03:15 crc kubenswrapper[4809]: I0930 02:03:15.994488 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b"} err="failed to get container status \"1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b\": rpc error: code = NotFound desc = could not find container \"1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b\": container with ID starting with 1f410d50cf1e49fd99c4b6f62f3e462e7f09776f12a3ca6b65212523f9bf611b not found: ID does not exist" Sep 30 02:03:17 crc kubenswrapper[4809]: I0930 02:03:17.709536 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" path="/var/lib/kubelet/pods/ab514495-c155-45b9-a2cf-6dd73ba16f85/volumes" Sep 30 02:03:19 crc kubenswrapper[4809]: I0930 
02:03:19.787114 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:19 crc kubenswrapper[4809]: I0930 02:03:19.859612 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:20 crc kubenswrapper[4809]: I0930 02:03:20.345432 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bxnk"] Sep 30 02:03:20 crc kubenswrapper[4809]: I0930 02:03:20.950030 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4bxnk" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="registry-server" containerID="cri-o://021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09" gracePeriod=2 Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.498579 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.574485 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-utilities\") pod \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.574550 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-catalog-content\") pod \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.574846 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfxlh\" (UniqueName: \"kubernetes.io/projected/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-kube-api-access-mfxlh\") pod \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\" (UID: \"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed\") " Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.575691 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-utilities" (OuterVolumeSpecName: "utilities") pod "62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" (UID: "62f5d7bd-14df-45b3-8ab2-6a932e4b78ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.582085 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-kube-api-access-mfxlh" (OuterVolumeSpecName: "kube-api-access-mfxlh") pod "62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" (UID: "62f5d7bd-14df-45b3-8ab2-6a932e4b78ed"). InnerVolumeSpecName "kube-api-access-mfxlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.645493 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" (UID: "62f5d7bd-14df-45b3-8ab2-6a932e4b78ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.677501 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfxlh\" (UniqueName: \"kubernetes.io/projected/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-kube-api-access-mfxlh\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.677542 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.677554 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.977579 4809 generic.go:334] "Generic (PLEG): container finished" podID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerID="021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09" exitCode=0 Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.977686 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerDied","Data":"021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09"} Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.977738 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bxnk" event={"ID":"62f5d7bd-14df-45b3-8ab2-6a932e4b78ed","Type":"ContainerDied","Data":"ab3a25c5951ec121ab119baee5e69a88c22d1793faac607f70b32b2f74267247"} Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.977762 4809 scope.go:117] "RemoveContainer" containerID="021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09" Sep 30 02:03:21 crc kubenswrapper[4809]: I0930 02:03:21.978015 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bxnk" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.015579 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bxnk"] Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.024142 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4bxnk"] Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.036344 4809 scope.go:117] "RemoveContainer" containerID="edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.061307 4809 scope.go:117] "RemoveContainer" containerID="b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.128028 4809 scope.go:117] "RemoveContainer" containerID="021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09" Sep 30 02:03:22 crc kubenswrapper[4809]: E0930 02:03:22.128506 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09\": container with ID starting with 021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09 not found: ID does not exist" containerID="021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.128565 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09"} err="failed to get container status \"021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09\": rpc error: code = NotFound desc = could not find container \"021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09\": container with ID starting with 021f8bf05f269c80721d5d8d9c21bfc126a9e91eb4f83c922a42ba2cfd37af09 not found: ID does not exist" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.128599 4809 scope.go:117] "RemoveContainer" containerID="edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb" Sep 30 02:03:22 crc kubenswrapper[4809]: E0930 02:03:22.129122 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb\": container with ID starting with edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb not found: ID does not exist" containerID="edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.129155 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb"} err="failed to get container status \"edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb\": rpc error: code = NotFound desc = could not find container \"edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb\": container with ID starting with edef16abf5dbb8424a2e3e81a8a6e10928ff67be9ca0036f14fd0170cad3c0cb not found: ID does not exist" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.129176 4809 scope.go:117] "RemoveContainer" containerID="b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0" Sep 30 02:03:22 crc kubenswrapper[4809]: E0930 02:03:22.129424 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0\": container with ID starting with b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0 not found: ID does not exist" containerID="b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0" Sep 30 02:03:22 crc kubenswrapper[4809]: I0930 02:03:22.129453 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0"} err="failed to get container status \"b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0\": rpc error: code = NotFound desc = could not find container \"b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0\": container with ID starting with b779f2a46bfe86b73274e9c83150053c46ae3e5ad7282fb8fb26740c5a049fb0 not found: ID does not exist" Sep 30 02:03:23 crc kubenswrapper[4809]: I0930 02:03:23.713181 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" path="/var/lib/kubelet/pods/62f5d7bd-14df-45b3-8ab2-6a932e4b78ed/volumes" Sep 30 02:03:25 crc kubenswrapper[4809]: I0930 02:03:25.325272 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:03:25 crc kubenswrapper[4809]: I0930 02:03:25.325363 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:03:55 crc kubenswrapper[4809]: I0930 02:03:55.325581 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:03:55 crc kubenswrapper[4809]: I0930 02:03:55.326459 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:03:55 crc kubenswrapper[4809]: I0930 02:03:55.326551 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:03:55 crc kubenswrapper[4809]: I0930 02:03:55.327844 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"458b55bd463e7782a8d22a7687a71bd620504535dd2a4c4447bbfdda853ab8da"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:03:55 crc kubenswrapper[4809]: I0930 02:03:55.327957 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://458b55bd463e7782a8d22a7687a71bd620504535dd2a4c4447bbfdda853ab8da" gracePeriod=600 Sep 30 02:03:56 crc kubenswrapper[4809]: I0930 02:03:56.418520 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="458b55bd463e7782a8d22a7687a71bd620504535dd2a4c4447bbfdda853ab8da" exitCode=0 Sep 30 02:03:56 crc kubenswrapper[4809]: I0930 02:03:56.418633 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"458b55bd463e7782a8d22a7687a71bd620504535dd2a4c4447bbfdda853ab8da"} Sep 30 02:03:56 crc kubenswrapper[4809]: I0930 02:03:56.419045 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff"} Sep 30 02:03:56 crc kubenswrapper[4809]: I0930 02:03:56.419069 4809 scope.go:117] "RemoveContainer" containerID="00b982f8da0403c584276e990d7ee31aebac82fa3d9e63908cf4f537d321a652" Sep 30 02:05:55 crc kubenswrapper[4809]: I0930 02:05:55.324557 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:05:55 crc kubenswrapper[4809]: I0930 02:05:55.325220 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:06:25 crc kubenswrapper[4809]: I0930 02:06:25.325283 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:06:25 crc kubenswrapper[4809]: I0930 02:06:25.325754 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.325811 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.326836 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:06:55 
crc kubenswrapper[4809]: I0930 02:06:55.327003 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.328791 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.331092 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" gracePeriod=600 Sep 30 02:06:55 crc kubenswrapper[4809]: E0930 02:06:55.468536 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.568738 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" exitCode=0 Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.568809 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff"} Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.568862 4809 scope.go:117] "RemoveContainer" containerID="458b55bd463e7782a8d22a7687a71bd620504535dd2a4c4447bbfdda853ab8da" Sep 30 02:06:55 crc kubenswrapper[4809]: I0930 02:06:55.570637 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:06:55 crc kubenswrapper[4809]: E0930 02:06:55.570967 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:07:09 crc kubenswrapper[4809]: I0930 02:07:09.735684 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:07:09 crc kubenswrapper[4809]: E0930 02:07:09.736511 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:07:24 crc kubenswrapper[4809]: I0930 02:07:24.691036 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:07:24 crc kubenswrapper[4809]: E0930 02:07:24.692070 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:07:35 crc kubenswrapper[4809]: I0930 02:07:35.691709 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:07:35 crc kubenswrapper[4809]: E0930 02:07:35.692631 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:07:46 crc kubenswrapper[4809]: I0930 02:07:46.690348 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:07:46 crc kubenswrapper[4809]: E0930 02:07:46.690979 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:07:58 crc kubenswrapper[4809]: I0930 02:07:58.691329 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:07:58 crc kubenswrapper[4809]: E0930 02:07:58.693257 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:08:13 crc kubenswrapper[4809]: I0930 02:08:13.693816 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:08:13 crc kubenswrapper[4809]: E0930 02:08:13.694765 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:08:28 crc kubenswrapper[4809]: I0930 02:08:28.691888 4809 
scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:08:28 crc kubenswrapper[4809]: E0930 02:08:28.692673 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:08:40 crc kubenswrapper[4809]: I0930 02:08:40.691109 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:08:40 crc kubenswrapper[4809]: E0930 02:08:40.691935 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:08:51 crc kubenswrapper[4809]: I0930 02:08:51.706553 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:08:51 crc kubenswrapper[4809]: E0930 02:08:51.707169 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:09:05 crc kubenswrapper[4809]: I0930 02:09:05.691713 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:09:05 crc kubenswrapper[4809]: E0930 02:09:05.692383 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:09:16 crc kubenswrapper[4809]: I0930 02:09:16.691202 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:09:16 crc kubenswrapper[4809]: E0930 02:09:16.692039 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:09:31 crc kubenswrapper[4809]: I0930 02:09:31.690963 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:09:31 crc kubenswrapper[4809]: E0930 02:09:31.691834 4809 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:09:42 crc kubenswrapper[4809]: I0930 02:09:42.690901 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:09:42 crc kubenswrapper[4809]: E0930 02:09:42.694053 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:09:53 crc kubenswrapper[4809]: I0930 02:09:53.692499 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:09:53 crc kubenswrapper[4809]: E0930 02:09:53.693547 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:10:04 crc kubenswrapper[4809]: I0930 02:10:04.691685 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:10:04 crc kubenswrapper[4809]: E0930 02:10:04.692571 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:10:18 crc kubenswrapper[4809]: I0930 02:10:18.691231 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:10:18 crc kubenswrapper[4809]: E0930 02:10:18.692117 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:10:33 crc kubenswrapper[4809]: I0930 02:10:33.691306 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:10:33 crc kubenswrapper[4809]: E0930 02:10:33.692106 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:10:45 crc kubenswrapper[4809]: I0930 02:10:45.692336 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:10:45 crc kubenswrapper[4809]: E0930 02:10:45.693261 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:10:57 crc kubenswrapper[4809]: I0930 02:10:57.692661 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:10:57 crc kubenswrapper[4809]: E0930 02:10:57.694108 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:11:10 crc kubenswrapper[4809]: I0930 02:11:10.691293 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:11:10 crc kubenswrapper[4809]: E0930 02:11:10.692068 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:11:25 crc kubenswrapper[4809]: I0930 02:11:25.693848 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:11:25 crc kubenswrapper[4809]: E0930 02:11:25.694911 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:11:40 crc kubenswrapper[4809]: I0930 02:11:40.691242 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:11:40 crc kubenswrapper[4809]: E0930 02:11:40.692383 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:11:51 crc kubenswrapper[4809]: I0930 02:11:51.691669 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:11:51 crc kubenswrapper[4809]: E0930 02:11:51.692463 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:12:04 crc kubenswrapper[4809]: I0930 02:12:04.719590 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:12:05 crc kubenswrapper[4809]: I0930 02:12:05.359296 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"e91c8c4896ef8d900599ba6b9bdad60e9cdebe2df1b84beee0f23cdb7c39b382"} Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.810941 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tqkwp"] Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.811907 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.811918 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.811932 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="extract-content" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.811940 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="extract-content" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.811951 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="extract-content" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.811959 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="extract-content" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.811993 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="extract-utilities" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812000 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="extract-utilities" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.812010 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="extract-utilities" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812015 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="extract-utilities" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.812030 4809 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812036 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.812052 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812057 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.812064 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="extract-content" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812072 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="extract-content" Sep 30 02:12:56 crc kubenswrapper[4809]: E0930 02:12:56.812081 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="extract-utilities" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812087 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="extract-utilities" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812273 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f5d7bd-14df-45b3-8ab2-6a932e4b78ed" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812304 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="303628c9-5695-47c1-894f-5daeea6f6ef3" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.812314 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab514495-c155-45b9-a2cf-6dd73ba16f85" containerName="registry-server" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.814383 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.823775 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqkwp"] Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.941422 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef0a3eb8-66ff-40f0-af00-3523984c3092-utilities\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.941578 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dgvn\" (UniqueName: \"kubernetes.io/projected/ef0a3eb8-66ff-40f0-af00-3523984c3092-kube-api-access-5dgvn\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:56 crc kubenswrapper[4809]: I0930 02:12:56.941816 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef0a3eb8-66ff-40f0-af00-3523984c3092-catalog-content\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.044128 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef0a3eb8-66ff-40f0-af00-3523984c3092-utilities\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.044223 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dgvn\" (UniqueName: \"kubernetes.io/projected/ef0a3eb8-66ff-40f0-af00-3523984c3092-kube-api-access-5dgvn\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.044271 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef0a3eb8-66ff-40f0-af00-3523984c3092-catalog-content\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.044722 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef0a3eb8-66ff-40f0-af00-3523984c3092-utilities\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.044786 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef0a3eb8-66ff-40f0-af00-3523984c3092-catalog-content\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.069732 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5dgvn\" (UniqueName: \"kubernetes.io/projected/ef0a3eb8-66ff-40f0-af00-3523984c3092-kube-api-access-5dgvn\") pod \"redhat-operators-tqkwp\" (UID: \"ef0a3eb8-66ff-40f0-af00-3523984c3092\") " pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.140529 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tqkwp" Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.673007 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqkwp"] Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.983998 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqkwp" event={"ID":"ef0a3eb8-66ff-40f0-af00-3523984c3092","Type":"ContainerStarted","Data":"d1946ef07e7b70f9ea8cd19910164206c6f5f5775b0595a3ae89d449723be4b1"} Sep 30 02:12:57 crc kubenswrapper[4809]: I0930 02:12:57.984037 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqkwp" event={"ID":"ef0a3eb8-66ff-40f0-af00-3523984c3092","Type":"ContainerStarted","Data":"8fee9a8ec19e269dee1f2923b075e4a1ed9126f142f2f557a4aa6ed2700292af"} Sep 30 02:12:58 crc kubenswrapper[4809]: I0930 02:12:58.994502 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef0a3eb8-66ff-40f0-af00-3523984c3092" containerID="d1946ef07e7b70f9ea8cd19910164206c6f5f5775b0595a3ae89d449723be4b1" exitCode=0 Sep 30 02:12:58 crc kubenswrapper[4809]: I0930 02:12:58.994923 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqkwp" event={"ID":"ef0a3eb8-66ff-40f0-af00-3523984c3092","Type":"ContainerDied","Data":"d1946ef07e7b70f9ea8cd19910164206c6f5f5775b0595a3ae89d449723be4b1"} Sep 30 02:12:58 crc kubenswrapper[4809]: I0930 02:12:58.997988 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 02:13:09 crc kubenswrapper[4809]: I0930 02:13:09.118347 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqkwp" event={"ID":"ef0a3eb8-66ff-40f0-af00-3523984c3092","Type":"ContainerStarted","Data":"c9b981826584deb3c9b8321a0f6e32c2d5bf9c0172bdd9a7dde7ec2cd38f1521"} Sep 30 02:13:10 crc kubenswrapper[4809]: I0930 02:13:10.134383 4809 generic.go:334] "Generic (PLEG): container finished" podID="ef0a3eb8-66ff-40f0-af00-3523984c3092" containerID="c9b981826584deb3c9b8321a0f6e32c2d5bf9c0172bdd9a7dde7ec2cd38f1521" exitCode=0 Sep 30 02:13:10 crc kubenswrapper[4809]: I0930 02:13:10.134542 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqkwp" event={"ID":"ef0a3eb8-66ff-40f0-af00-3523984c3092","Type":"ContainerDied","Data":"c9b981826584deb3c9b8321a0f6e32c2d5bf9c0172bdd9a7dde7ec2cd38f1521"} Sep 30 02:13:11 crc kubenswrapper[4809]: I0930 02:13:11.162015 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqkwp" event={"ID":"ef0a3eb8-66ff-40f0-af00-3523984c3092","Type":"ContainerStarted","Data":"dae41702b0d3cafc2fc6071f7ff6bdc9e5dc4fad4730e621e772af95c5c2189b"} Sep 30 02:13:11 crc kubenswrapper[4809]: I0930 02:13:11.196790 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tqkwp" podStartSLOduration=3.343681618 podStartE2EDuration="15.19677039s" podCreationTimestamp="2025-09-30 02:12:56 +0000 UTC" firstStartedPulling="2025-09-30 
02:12:58.99761428 +0000 UTC m=+7430.033863708" lastFinishedPulling="2025-09-30 02:13:10.850703062 +0000 UTC m=+7441.886952480" observedRunningTime="2025-09-30 02:13:11.187048735 +0000 UTC m=+7442.223298143" watchObservedRunningTime="2025-09-30 02:13:11.19677039 +0000 UTC m=+7442.233019798"
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.141102 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tqkwp"
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.141728 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tqkwp"
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.218983 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tqkwp"
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.333033 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tqkwp"
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.444156 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqkwp"]
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.508409 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vwc2v"]
Sep 30 02:13:17 crc kubenswrapper[4809]: I0930 02:13:17.508695 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vwc2v" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="registry-server" containerID="cri-o://d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b" gracePeriod=2
Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.134047 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vwc2v"
Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.274856 4809 generic.go:334] "Generic (PLEG): container finished" podID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerID="d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b" exitCode=0
Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.274946 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerDied","Data":"d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b"}
Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.274928 4809 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-vwc2v" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.274986 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vwc2v" event={"ID":"21e7f024-842e-4582-b6a5-2776d20aefb2","Type":"ContainerDied","Data":"76cd0fe4500348bc4f61012b7084d96fb81815f8307a431a553ca219a2926e09"} Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.275008 4809 scope.go:117] "RemoveContainer" containerID="d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.297880 4809 scope.go:117] "RemoveContainer" containerID="1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.306680 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-catalog-content\") pod \"21e7f024-842e-4582-b6a5-2776d20aefb2\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.307381 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8p6v\" (UniqueName: \"kubernetes.io/projected/21e7f024-842e-4582-b6a5-2776d20aefb2-kube-api-access-t8p6v\") pod \"21e7f024-842e-4582-b6a5-2776d20aefb2\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.307633 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-utilities\") pod \"21e7f024-842e-4582-b6a5-2776d20aefb2\" (UID: \"21e7f024-842e-4582-b6a5-2776d20aefb2\") " Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.309297 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-utilities" (OuterVolumeSpecName: "utilities") pod "21e7f024-842e-4582-b6a5-2776d20aefb2" (UID: "21e7f024-842e-4582-b6a5-2776d20aefb2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.315992 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21e7f024-842e-4582-b6a5-2776d20aefb2-kube-api-access-t8p6v" (OuterVolumeSpecName: "kube-api-access-t8p6v") pod "21e7f024-842e-4582-b6a5-2776d20aefb2" (UID: "21e7f024-842e-4582-b6a5-2776d20aefb2"). InnerVolumeSpecName "kube-api-access-t8p6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.326895 4809 scope.go:117] "RemoveContainer" containerID="b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.412131 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8p6v\" (UniqueName: \"kubernetes.io/projected/21e7f024-842e-4582-b6a5-2776d20aefb2-kube-api-access-t8p6v\") on node \"crc\" DevicePath \"\"" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.412180 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.421524 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21e7f024-842e-4582-b6a5-2776d20aefb2" (UID: "21e7f024-842e-4582-b6a5-2776d20aefb2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.428735 4809 scope.go:117] "RemoveContainer" containerID="d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b" Sep 30 02:13:18 crc kubenswrapper[4809]: E0930 02:13:18.429262 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b\": container with ID starting with d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b not found: ID does not exist" containerID="d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.429345 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b"} err="failed to get container status \"d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b\": rpc error: code = NotFound desc = could not find container \"d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b\": container with ID starting with d3392a70ddf9650a433b47889ccfefe81ca11b0ba5b61086a60e5832a3af8f8b not found: ID does not exist" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.429394 4809 scope.go:117] "RemoveContainer" containerID="1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8" Sep 30 02:13:18 crc kubenswrapper[4809]: E0930 02:13:18.429894 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8\": container with ID starting with 1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8 not found: ID does not exist" containerID="1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.429958 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8"} err="failed to get container status \"1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8\": rpc error: code = NotFound desc = could not find container 
\"1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8\": container with ID starting with 1b744c60d3ec3ef7d244294a13ba794fd9140370a29d36166b3df6db0ee664a8 not found: ID does not exist" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.430007 4809 scope.go:117] "RemoveContainer" containerID="b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857" Sep 30 02:13:18 crc kubenswrapper[4809]: E0930 02:13:18.430395 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857\": container with ID starting with b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857 not found: ID does not exist" containerID="b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.430444 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857"} err="failed to get container status \"b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857\": rpc error: code = NotFound desc = could not find container \"b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857\": container with ID starting with b5c1e4b8941fc513f80fd023bcb4fd81c16e75432f363cc155c7b120ecaa7857 not found: ID does not exist" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.514636 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e7f024-842e-4582-b6a5-2776d20aefb2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.614489 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vwc2v"] Sep 30 02:13:18 crc kubenswrapper[4809]: I0930 02:13:18.626003 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vwc2v"] Sep 30 02:13:19 crc kubenswrapper[4809]: I0930 02:13:19.719272 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" path="/var/lib/kubelet/pods/21e7f024-842e-4582-b6a5-2776d20aefb2/volumes" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.807156 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vfshc"] Sep 30 02:13:37 crc kubenswrapper[4809]: E0930 02:13:37.808062 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="extract-utilities" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.808075 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="extract-utilities" Sep 30 02:13:37 crc kubenswrapper[4809]: E0930 02:13:37.808103 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="extract-content" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.808109 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="extract-content" Sep 30 02:13:37 crc kubenswrapper[4809]: E0930 02:13:37.808131 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="registry-server" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.808139 4809 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="registry-server" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.808339 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="21e7f024-842e-4582-b6a5-2776d20aefb2" containerName="registry-server" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.809865 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.818234 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vfshc"] Sep 30 02:13:37 crc kubenswrapper[4809]: I0930 02:13:37.999671 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-utilities\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:37.999815 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-catalog-content\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:37.999946 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhrbs\" (UniqueName: \"kubernetes.io/projected/1ba34e21-96ae-4ca0-af10-713326bd5c10-kube-api-access-jhrbs\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.101665 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhrbs\" (UniqueName: \"kubernetes.io/projected/1ba34e21-96ae-4ca0-af10-713326bd5c10-kube-api-access-jhrbs\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.101858 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-utilities\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.101973 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-catalog-content\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.102570 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-utilities\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.102725 
4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-catalog-content\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.124168 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhrbs\" (UniqueName: \"kubernetes.io/projected/1ba34e21-96ae-4ca0-af10-713326bd5c10-kube-api-access-jhrbs\") pod \"certified-operators-vfshc\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.129138 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:38 crc kubenswrapper[4809]: I0930 02:13:38.697366 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vfshc"] Sep 30 02:13:39 crc kubenswrapper[4809]: I0930 02:13:39.520305 4809 generic.go:334] "Generic (PLEG): container finished" podID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerID="e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666" exitCode=0 Sep 30 02:13:39 crc kubenswrapper[4809]: I0930 02:13:39.520859 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerDied","Data":"e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666"} Sep 30 02:13:39 crc kubenswrapper[4809]: I0930 02:13:39.520893 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerStarted","Data":"caf4ffc5f8a5f3b8f6b2ca2e6820c5bc4a82ea4c42df33a7d2cce12b155ceac0"} Sep 30 02:13:41 crc kubenswrapper[4809]: I0930 02:13:41.550761 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerStarted","Data":"5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062"} Sep 30 02:13:42 crc kubenswrapper[4809]: I0930 02:13:42.563975 4809 generic.go:334] "Generic (PLEG): container finished" podID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerID="5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062" exitCode=0 Sep 30 02:13:42 crc kubenswrapper[4809]: I0930 02:13:42.564041 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerDied","Data":"5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062"} Sep 30 02:13:43 crc kubenswrapper[4809]: I0930 02:13:43.579911 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerStarted","Data":"76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0"} Sep 30 02:13:43 crc kubenswrapper[4809]: I0930 02:13:43.607959 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vfshc" podStartSLOduration=3.020300666 podStartE2EDuration="6.607933706s" podCreationTimestamp="2025-09-30 02:13:37 +0000 UTC" 
firstStartedPulling="2025-09-30 02:13:39.523885837 +0000 UTC m=+7470.560135295" lastFinishedPulling="2025-09-30 02:13:43.111518917 +0000 UTC m=+7474.147768335" observedRunningTime="2025-09-30 02:13:43.598862498 +0000 UTC m=+7474.635111936" watchObservedRunningTime="2025-09-30 02:13:43.607933706 +0000 UTC m=+7474.644183124" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.129725 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.130484 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.217131 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.292821 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-drt4c"] Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.296169 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.326013 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-drt4c"] Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.461997 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-catalog-content\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.462098 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7v8r\" (UniqueName: \"kubernetes.io/projected/6920cc6d-375c-43ec-8264-fe99b9b0b71a-kube-api-access-x7v8r\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.462237 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-utilities\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.564387 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-catalog-content\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.564494 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7v8r\" (UniqueName: \"kubernetes.io/projected/6920cc6d-375c-43ec-8264-fe99b9b0b71a-kube-api-access-x7v8r\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.564590 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-utilities\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.565303 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-utilities\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.565393 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-catalog-content\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.586116 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7v8r\" (UniqueName: \"kubernetes.io/projected/6920cc6d-375c-43ec-8264-fe99b9b0b71a-kube-api-access-x7v8r\") pod \"redhat-marketplace-drt4c\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.670282 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:48 crc kubenswrapper[4809]: I0930 02:13:48.715884 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:49 crc kubenswrapper[4809]: I0930 02:13:49.153531 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-drt4c"] Sep 30 02:13:49 crc kubenswrapper[4809]: W0930 02:13:49.162333 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6920cc6d_375c_43ec_8264_fe99b9b0b71a.slice/crio-aa19ef058718beb6a8840ac0514a246ed7b6fa99cd8c3ab970bb0343b51be5bd WatchSource:0}: Error finding container aa19ef058718beb6a8840ac0514a246ed7b6fa99cd8c3ab970bb0343b51be5bd: Status 404 returned error can't find the container with id aa19ef058718beb6a8840ac0514a246ed7b6fa99cd8c3ab970bb0343b51be5bd Sep 30 02:13:49 crc kubenswrapper[4809]: I0930 02:13:49.666787 4809 generic.go:334] "Generic (PLEG): container finished" podID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerID="eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183" exitCode=0 Sep 30 02:13:49 crc kubenswrapper[4809]: I0930 02:13:49.666872 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-drt4c" event={"ID":"6920cc6d-375c-43ec-8264-fe99b9b0b71a","Type":"ContainerDied","Data":"eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183"} Sep 30 02:13:49 crc kubenswrapper[4809]: I0930 02:13:49.667483 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-drt4c" event={"ID":"6920cc6d-375c-43ec-8264-fe99b9b0b71a","Type":"ContainerStarted","Data":"aa19ef058718beb6a8840ac0514a246ed7b6fa99cd8c3ab970bb0343b51be5bd"} Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.046997 4809 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vfshc"] Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.048759 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vfshc" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="registry-server" containerID="cri-o://76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0" gracePeriod=2 Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.647583 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.697918 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vfshc" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.697931 4809 generic.go:334] "Generic (PLEG): container finished" podID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerID="76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0" exitCode=0 Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.701442 4809 generic.go:334] "Generic (PLEG): container finished" podID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerID="26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d" exitCode=0 Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.712265 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerDied","Data":"76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0"} Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.712338 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vfshc" event={"ID":"1ba34e21-96ae-4ca0-af10-713326bd5c10","Type":"ContainerDied","Data":"caf4ffc5f8a5f3b8f6b2ca2e6820c5bc4a82ea4c42df33a7d2cce12b155ceac0"} Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.712355 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-drt4c" event={"ID":"6920cc6d-375c-43ec-8264-fe99b9b0b71a","Type":"ContainerDied","Data":"26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d"} Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.712386 4809 scope.go:117] "RemoveContainer" containerID="76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.742101 4809 scope.go:117] "RemoveContainer" containerID="5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.742142 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-utilities\") pod \"1ba34e21-96ae-4ca0-af10-713326bd5c10\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.742500 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-catalog-content\") pod \"1ba34e21-96ae-4ca0-af10-713326bd5c10\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.742636 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-jhrbs\" (UniqueName: \"kubernetes.io/projected/1ba34e21-96ae-4ca0-af10-713326bd5c10-kube-api-access-jhrbs\") pod \"1ba34e21-96ae-4ca0-af10-713326bd5c10\" (UID: \"1ba34e21-96ae-4ca0-af10-713326bd5c10\") " Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.743421 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-utilities" (OuterVolumeSpecName: "utilities") pod "1ba34e21-96ae-4ca0-af10-713326bd5c10" (UID: "1ba34e21-96ae-4ca0-af10-713326bd5c10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.743591 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.777314 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ba34e21-96ae-4ca0-af10-713326bd5c10-kube-api-access-jhrbs" (OuterVolumeSpecName: "kube-api-access-jhrbs") pod "1ba34e21-96ae-4ca0-af10-713326bd5c10" (UID: "1ba34e21-96ae-4ca0-af10-713326bd5c10"). InnerVolumeSpecName "kube-api-access-jhrbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.791116 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ba34e21-96ae-4ca0-af10-713326bd5c10" (UID: "1ba34e21-96ae-4ca0-af10-713326bd5c10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.837211 4809 scope.go:117] "RemoveContainer" containerID="e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.848925 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhrbs\" (UniqueName: \"kubernetes.io/projected/1ba34e21-96ae-4ca0-af10-713326bd5c10-kube-api-access-jhrbs\") on node \"crc\" DevicePath \"\"" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.848959 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ba34e21-96ae-4ca0-af10-713326bd5c10-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.895095 4809 scope.go:117] "RemoveContainer" containerID="76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0" Sep 30 02:13:51 crc kubenswrapper[4809]: E0930 02:13:51.895493 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0\": container with ID starting with 76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0 not found: ID does not exist" containerID="76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.895536 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0"} err="failed to get container status \"76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0\": rpc error: code = NotFound 
desc = could not find container \"76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0\": container with ID starting with 76449b5f028468800e2b5355d5ca2ef9aad4adfd9de268b0473369eaa460d9e0 not found: ID does not exist" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.895566 4809 scope.go:117] "RemoveContainer" containerID="5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062" Sep 30 02:13:51 crc kubenswrapper[4809]: E0930 02:13:51.896189 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062\": container with ID starting with 5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062 not found: ID does not exist" containerID="5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.896251 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062"} err="failed to get container status \"5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062\": rpc error: code = NotFound desc = could not find container \"5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062\": container with ID starting with 5753410cf23f6dfe75be5735702ea7fd8731973364c780b2cc78d0e96bc41062 not found: ID does not exist" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.896275 4809 scope.go:117] "RemoveContainer" containerID="e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666" Sep 30 02:13:51 crc kubenswrapper[4809]: E0930 02:13:51.896541 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666\": container with ID starting with e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666 not found: ID does not exist" containerID="e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666" Sep 30 02:13:51 crc kubenswrapper[4809]: I0930 02:13:51.896570 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666"} err="failed to get container status \"e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666\": rpc error: code = NotFound desc = could not find container \"e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666\": container with ID starting with e4a72f37670f9f6e79f3308cbaf1f38ca85d2772dbcae9e5019557b967a7a666 not found: ID does not exist" Sep 30 02:13:52 crc kubenswrapper[4809]: I0930 02:13:52.045624 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vfshc"] Sep 30 02:13:52 crc kubenswrapper[4809]: I0930 02:13:52.061821 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vfshc"] Sep 30 02:13:52 crc kubenswrapper[4809]: I0930 02:13:52.715607 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-drt4c" event={"ID":"6920cc6d-375c-43ec-8264-fe99b9b0b71a","Type":"ContainerStarted","Data":"f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3"} Sep 30 02:13:52 crc kubenswrapper[4809]: I0930 02:13:52.739331 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-drt4c" podStartSLOduration=2.294951571 podStartE2EDuration="4.739314663s" podCreationTimestamp="2025-09-30 02:13:48 +0000 UTC" firstStartedPulling="2025-09-30 02:13:49.669531905 +0000 UTC m=+7480.705781313" lastFinishedPulling="2025-09-30 02:13:52.113894997 +0000 UTC m=+7483.150144405" observedRunningTime="2025-09-30 02:13:52.733132834 +0000 UTC m=+7483.769382322" watchObservedRunningTime="2025-09-30 02:13:52.739314663 +0000 UTC m=+7483.775564081" Sep 30 02:13:53 crc kubenswrapper[4809]: I0930 02:13:53.702783 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" path="/var/lib/kubelet/pods/1ba34e21-96ae-4ca0-af10-713326bd5c10/volumes" Sep 30 02:13:58 crc kubenswrapper[4809]: I0930 02:13:58.670628 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:58 crc kubenswrapper[4809]: I0930 02:13:58.671113 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:58 crc kubenswrapper[4809]: I0930 02:13:58.785461 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:58 crc kubenswrapper[4809]: I0930 02:13:58.842037 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:13:59 crc kubenswrapper[4809]: I0930 02:13:59.028868 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-drt4c"] Sep 30 02:14:00 crc kubenswrapper[4809]: I0930 02:14:00.794700 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-drt4c" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="registry-server" containerID="cri-o://f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3" gracePeriod=2 Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.344232 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.491217 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7v8r\" (UniqueName: \"kubernetes.io/projected/6920cc6d-375c-43ec-8264-fe99b9b0b71a-kube-api-access-x7v8r\") pod \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.491294 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-catalog-content\") pod \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.491501 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-utilities\") pod \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\" (UID: \"6920cc6d-375c-43ec-8264-fe99b9b0b71a\") " Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.492933 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-utilities" (OuterVolumeSpecName: "utilities") pod "6920cc6d-375c-43ec-8264-fe99b9b0b71a" (UID: "6920cc6d-375c-43ec-8264-fe99b9b0b71a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.499955 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6920cc6d-375c-43ec-8264-fe99b9b0b71a-kube-api-access-x7v8r" (OuterVolumeSpecName: "kube-api-access-x7v8r") pod "6920cc6d-375c-43ec-8264-fe99b9b0b71a" (UID: "6920cc6d-375c-43ec-8264-fe99b9b0b71a"). InnerVolumeSpecName "kube-api-access-x7v8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.515270 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6920cc6d-375c-43ec-8264-fe99b9b0b71a" (UID: "6920cc6d-375c-43ec-8264-fe99b9b0b71a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.594345 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.594395 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7v8r\" (UniqueName: \"kubernetes.io/projected/6920cc6d-375c-43ec-8264-fe99b9b0b71a-kube-api-access-x7v8r\") on node \"crc\" DevicePath \"\"" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.594413 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6920cc6d-375c-43ec-8264-fe99b9b0b71a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.808958 4809 generic.go:334] "Generic (PLEG): container finished" podID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerID="f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3" exitCode=0 Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.809014 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-drt4c" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.809043 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-drt4c" event={"ID":"6920cc6d-375c-43ec-8264-fe99b9b0b71a","Type":"ContainerDied","Data":"f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3"} Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.809562 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-drt4c" event={"ID":"6920cc6d-375c-43ec-8264-fe99b9b0b71a","Type":"ContainerDied","Data":"aa19ef058718beb6a8840ac0514a246ed7b6fa99cd8c3ab970bb0343b51be5bd"} Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.809604 4809 scope.go:117] "RemoveContainer" containerID="f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.857330 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-drt4c"] Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.872066 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-drt4c"] Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.874432 4809 scope.go:117] "RemoveContainer" containerID="26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.908996 4809 scope.go:117] "RemoveContainer" containerID="eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.955071 4809 scope.go:117] "RemoveContainer" containerID="f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3" Sep 30 02:14:01 crc kubenswrapper[4809]: E0930 02:14:01.960314 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3\": container with ID starting with f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3 not found: ID does not exist" containerID="f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.960365 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3"} err="failed to get container status \"f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3\": rpc error: code = NotFound desc = could not find container \"f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3\": container with ID starting with f952b3990cfe0a18d08dcb01238936f1d7569e2c18cba07e59a798195f932aa3 not found: ID does not exist" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.960419 4809 scope.go:117] "RemoveContainer" containerID="26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d" Sep 30 02:14:01 crc kubenswrapper[4809]: E0930 02:14:01.960865 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d\": container with ID starting with 26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d not found: ID does not exist" containerID="26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.960894 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d"} err="failed to get container status \"26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d\": rpc error: code = NotFound desc = could not find container \"26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d\": container with ID starting with 26d346935619ab5771c226257285fc47e78ad3127f5f2518d4acd5890334cc5d not found: ID does not exist" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.960910 4809 scope.go:117] "RemoveContainer" containerID="eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183" Sep 30 02:14:01 crc kubenswrapper[4809]: E0930 02:14:01.961269 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183\": container with ID starting with eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183 not found: ID does not exist" containerID="eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183" Sep 30 02:14:01 crc kubenswrapper[4809]: I0930 02:14:01.961289 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183"} err="failed to get container status \"eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183\": rpc error: code = NotFound desc = could not find container \"eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183\": container with ID starting with eda3c783bd39852f72b64cd5e266858eb22bce80010cddd0161517e1c8a47183 not found: ID does not exist" Sep 30 02:14:03 crc kubenswrapper[4809]: I0930 02:14:03.706823 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" path="/var/lib/kubelet/pods/6920cc6d-375c-43ec-8264-fe99b9b0b71a/volumes" Sep 30 02:14:25 crc kubenswrapper[4809]: I0930 02:14:25.324790 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:14:25 crc kubenswrapper[4809]: I0930 02:14:25.326942 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:14:55 crc kubenswrapper[4809]: I0930 02:14:55.325078 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:14:55 crc kubenswrapper[4809]: I0930 02:14:55.325637 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.152101 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k"] Sep 30 02:15:00 crc kubenswrapper[4809]: E0930 02:15:00.153379 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="extract-content" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153401 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="extract-content" Sep 30 02:15:00 crc kubenswrapper[4809]: E0930 02:15:00.153432 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="registry-server" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153439 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="registry-server" Sep 30 02:15:00 crc kubenswrapper[4809]: E0930 02:15:00.153475 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="extract-content" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153482 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="extract-content" Sep 30 02:15:00 crc kubenswrapper[4809]: E0930 02:15:00.153499 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="extract-utilities" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153507 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="extract-utilities" Sep 30 02:15:00 crc kubenswrapper[4809]: E0930 02:15:00.153522 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="registry-server" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153529 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="registry-server" Sep 30 02:15:00 crc kubenswrapper[4809]: E0930 02:15:00.153560 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" 
containerName="extract-utilities" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153567 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="extract-utilities" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153813 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6920cc6d-375c-43ec-8264-fe99b9b0b71a" containerName="registry-server" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.153857 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ba34e21-96ae-4ca0-af10-713326bd5c10" containerName="registry-server" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.155788 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.159147 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.164239 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.182771 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k"] Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.265048 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqqdg\" (UniqueName: \"kubernetes.io/projected/6f988360-e99e-4a51-8ba0-3a08bd81994c-kube-api-access-qqqdg\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.265103 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f988360-e99e-4a51-8ba0-3a08bd81994c-secret-volume\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.265192 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f988360-e99e-4a51-8ba0-3a08bd81994c-config-volume\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.367116 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqqdg\" (UniqueName: \"kubernetes.io/projected/6f988360-e99e-4a51-8ba0-3a08bd81994c-kube-api-access-qqqdg\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.367173 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f988360-e99e-4a51-8ba0-3a08bd81994c-secret-volume\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.367266 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f988360-e99e-4a51-8ba0-3a08bd81994c-config-volume\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.368513 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f988360-e99e-4a51-8ba0-3a08bd81994c-config-volume\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.374245 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f988360-e99e-4a51-8ba0-3a08bd81994c-secret-volume\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.384374 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqqdg\" (UniqueName: \"kubernetes.io/projected/6f988360-e99e-4a51-8ba0-3a08bd81994c-kube-api-access-qqqdg\") pod \"collect-profiles-29319975-l2b9k\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.485581 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:00 crc kubenswrapper[4809]: I0930 02:15:00.971119 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k"] Sep 30 02:15:01 crc kubenswrapper[4809]: I0930 02:15:01.600845 4809 generic.go:334] "Generic (PLEG): container finished" podID="6f988360-e99e-4a51-8ba0-3a08bd81994c" containerID="735f30fab094c5c5950e5c8c365cc770957307042c0b730426c6c98f8ab71977" exitCode=0 Sep 30 02:15:01 crc kubenswrapper[4809]: I0930 02:15:01.601007 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" event={"ID":"6f988360-e99e-4a51-8ba0-3a08bd81994c","Type":"ContainerDied","Data":"735f30fab094c5c5950e5c8c365cc770957307042c0b730426c6c98f8ab71977"} Sep 30 02:15:01 crc kubenswrapper[4809]: I0930 02:15:01.601405 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" event={"ID":"6f988360-e99e-4a51-8ba0-3a08bd81994c","Type":"ContainerStarted","Data":"21528cdbcd36e1925638d7d42287a30451789884dd0e8b54ad9c95e35d66f9c8"} Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.014572 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.142365 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f988360-e99e-4a51-8ba0-3a08bd81994c-secret-volume\") pod \"6f988360-e99e-4a51-8ba0-3a08bd81994c\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.142460 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f988360-e99e-4a51-8ba0-3a08bd81994c-config-volume\") pod \"6f988360-e99e-4a51-8ba0-3a08bd81994c\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.142633 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqqdg\" (UniqueName: \"kubernetes.io/projected/6f988360-e99e-4a51-8ba0-3a08bd81994c-kube-api-access-qqqdg\") pod \"6f988360-e99e-4a51-8ba0-3a08bd81994c\" (UID: \"6f988360-e99e-4a51-8ba0-3a08bd81994c\") " Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.143319 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f988360-e99e-4a51-8ba0-3a08bd81994c-config-volume" (OuterVolumeSpecName: "config-volume") pod "6f988360-e99e-4a51-8ba0-3a08bd81994c" (UID: "6f988360-e99e-4a51-8ba0-3a08bd81994c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.143947 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f988360-e99e-4a51-8ba0-3a08bd81994c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.151094 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f988360-e99e-4a51-8ba0-3a08bd81994c-kube-api-access-qqqdg" (OuterVolumeSpecName: "kube-api-access-qqqdg") pod "6f988360-e99e-4a51-8ba0-3a08bd81994c" (UID: "6f988360-e99e-4a51-8ba0-3a08bd81994c"). InnerVolumeSpecName "kube-api-access-qqqdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.151276 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f988360-e99e-4a51-8ba0-3a08bd81994c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6f988360-e99e-4a51-8ba0-3a08bd81994c" (UID: "6f988360-e99e-4a51-8ba0-3a08bd81994c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.246515 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f988360-e99e-4a51-8ba0-3a08bd81994c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.246552 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqqdg\" (UniqueName: \"kubernetes.io/projected/6f988360-e99e-4a51-8ba0-3a08bd81994c-kube-api-access-qqqdg\") on node \"crc\" DevicePath \"\"" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.624321 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" event={"ID":"6f988360-e99e-4a51-8ba0-3a08bd81994c","Type":"ContainerDied","Data":"21528cdbcd36e1925638d7d42287a30451789884dd0e8b54ad9c95e35d66f9c8"} Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.624589 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21528cdbcd36e1925638d7d42287a30451789884dd0e8b54ad9c95e35d66f9c8" Sep 30 02:15:03 crc kubenswrapper[4809]: I0930 02:15:03.624349 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319975-l2b9k" Sep 30 02:15:04 crc kubenswrapper[4809]: I0930 02:15:04.117742 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf"] Sep 30 02:15:04 crc kubenswrapper[4809]: I0930 02:15:04.127092 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319930-6p4xf"] Sep 30 02:15:05 crc kubenswrapper[4809]: I0930 02:15:05.707819 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3eb6865-e0c7-46b7-8eb0-b12656040c1a" path="/var/lib/kubelet/pods/a3eb6865-e0c7-46b7-8eb0-b12656040c1a/volumes" Sep 30 02:15:20 crc kubenswrapper[4809]: I0930 02:15:20.558600 4809 scope.go:117] "RemoveContainer" containerID="6bbc2258fd1a487a1cb4a75e560fcbeb59ceb9ae90f9bec666f3acdc9426913f" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.325582 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.326427 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.326496 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.327847 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e91c8c4896ef8d900599ba6b9bdad60e9cdebe2df1b84beee0f23cdb7c39b382"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.327933 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://e91c8c4896ef8d900599ba6b9bdad60e9cdebe2df1b84beee0f23cdb7c39b382" gracePeriod=600 Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.879855 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="e91c8c4896ef8d900599ba6b9bdad60e9cdebe2df1b84beee0f23cdb7c39b382" exitCode=0 Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.879934 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"e91c8c4896ef8d900599ba6b9bdad60e9cdebe2df1b84beee0f23cdb7c39b382"} Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.880825 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7"} Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.880909 4809 scope.go:117] "RemoveContainer" containerID="5b505ac8ef32f2a19ea61f84dd8510ea14c03280ac0c1b0bb2330af30c37f5ff" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.939141 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sqcv6"] Sep 30 02:15:25 crc kubenswrapper[4809]: E0930 02:15:25.939736 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f988360-e99e-4a51-8ba0-3a08bd81994c" containerName="collect-profiles" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.939763 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f988360-e99e-4a51-8ba0-3a08bd81994c" containerName="collect-profiles" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.940008 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f988360-e99e-4a51-8ba0-3a08bd81994c" containerName="collect-profiles" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.941719 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:25 crc kubenswrapper[4809]: I0930 02:15:25.954587 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sqcv6"] Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.006857 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-catalog-content\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.007286 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lct25\" (UniqueName: \"kubernetes.io/projected/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-kube-api-access-lct25\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.007421 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-utilities\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.110058 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-catalog-content\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.110252 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lct25\" (UniqueName: \"kubernetes.io/projected/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-kube-api-access-lct25\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.110294 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-utilities\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.110946 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-catalog-content\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.110994 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-utilities\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.129264 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lct25\" (UniqueName: \"kubernetes.io/projected/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-kube-api-access-lct25\") pod \"community-operators-sqcv6\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.260599 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:26 crc kubenswrapper[4809]: I0930 02:15:26.892725 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sqcv6"] Sep 30 02:15:26 crc kubenswrapper[4809]: W0930 02:15:26.899588 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3e50761_de33_44d4_bbe4_fac4ee3be3cb.slice/crio-70d07cd9d9679729329aa1e9d48c1bb16c503f85ec02ca53712b3ec6cee446e5 WatchSource:0}: Error finding container 70d07cd9d9679729329aa1e9d48c1bb16c503f85ec02ca53712b3ec6cee446e5: Status 404 returned error can't find the container with id 70d07cd9d9679729329aa1e9d48c1bb16c503f85ec02ca53712b3ec6cee446e5 Sep 30 02:15:27 crc kubenswrapper[4809]: I0930 02:15:27.908769 4809 generic.go:334] "Generic (PLEG): container finished" podID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerID="936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f" exitCode=0 Sep 30 02:15:27 crc kubenswrapper[4809]: I0930 02:15:27.908891 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerDied","Data":"936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f"} Sep 30 02:15:27 crc kubenswrapper[4809]: I0930 02:15:27.909108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerStarted","Data":"70d07cd9d9679729329aa1e9d48c1bb16c503f85ec02ca53712b3ec6cee446e5"} Sep 30 02:15:29 crc kubenswrapper[4809]: I0930 02:15:29.936632 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerStarted","Data":"f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45"} Sep 30 02:15:30 crc kubenswrapper[4809]: I0930 02:15:30.948005 4809 generic.go:334] "Generic (PLEG): container finished" podID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerID="f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45" exitCode=0 Sep 30 02:15:30 crc kubenswrapper[4809]: I0930 02:15:30.948258 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerDied","Data":"f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45"} Sep 30 02:15:31 crc kubenswrapper[4809]: I0930 02:15:31.982119 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerStarted","Data":"1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf"} Sep 30 02:15:32 crc kubenswrapper[4809]: I0930 02:15:32.006508 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sqcv6" 
podStartSLOduration=3.465266095 podStartE2EDuration="7.006491379s" podCreationTimestamp="2025-09-30 02:15:25 +0000 UTC" firstStartedPulling="2025-09-30 02:15:27.910748812 +0000 UTC m=+7578.946998220" lastFinishedPulling="2025-09-30 02:15:31.451974086 +0000 UTC m=+7582.488223504" observedRunningTime="2025-09-30 02:15:32.003090226 +0000 UTC m=+7583.039339634" watchObservedRunningTime="2025-09-30 02:15:32.006491379 +0000 UTC m=+7583.042740787" Sep 30 02:15:36 crc kubenswrapper[4809]: I0930 02:15:36.261693 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:36 crc kubenswrapper[4809]: I0930 02:15:36.263901 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:36 crc kubenswrapper[4809]: I0930 02:15:36.328900 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:37 crc kubenswrapper[4809]: I0930 02:15:37.087845 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:37 crc kubenswrapper[4809]: I0930 02:15:37.137863 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sqcv6"] Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.061105 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sqcv6" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="registry-server" containerID="cri-o://1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf" gracePeriod=2 Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.674034 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.846775 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lct25\" (UniqueName: \"kubernetes.io/projected/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-kube-api-access-lct25\") pod \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.846941 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-catalog-content\") pod \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.847080 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-utilities\") pod \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\" (UID: \"d3e50761-de33-44d4-bbe4-fac4ee3be3cb\") " Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.848497 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-utilities" (OuterVolumeSpecName: "utilities") pod "d3e50761-de33-44d4-bbe4-fac4ee3be3cb" (UID: "d3e50761-de33-44d4-bbe4-fac4ee3be3cb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.852113 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-kube-api-access-lct25" (OuterVolumeSpecName: "kube-api-access-lct25") pod "d3e50761-de33-44d4-bbe4-fac4ee3be3cb" (UID: "d3e50761-de33-44d4-bbe4-fac4ee3be3cb"). InnerVolumeSpecName "kube-api-access-lct25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.897606 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3e50761-de33-44d4-bbe4-fac4ee3be3cb" (UID: "d3e50761-de33-44d4-bbe4-fac4ee3be3cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.949977 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.950024 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lct25\" (UniqueName: \"kubernetes.io/projected/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-kube-api-access-lct25\") on node \"crc\" DevicePath \"\"" Sep 30 02:15:39 crc kubenswrapper[4809]: I0930 02:15:39.950040 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3e50761-de33-44d4-bbe4-fac4ee3be3cb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.086553 4809 generic.go:334] "Generic (PLEG): container finished" podID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerID="1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf" exitCode=0 Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.086595 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerDied","Data":"1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf"} Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.086620 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqcv6" event={"ID":"d3e50761-de33-44d4-bbe4-fac4ee3be3cb","Type":"ContainerDied","Data":"70d07cd9d9679729329aa1e9d48c1bb16c503f85ec02ca53712b3ec6cee446e5"} Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.086650 4809 scope.go:117] "RemoveContainer" containerID="1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.086782 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sqcv6" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.115477 4809 scope.go:117] "RemoveContainer" containerID="f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.134124 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sqcv6"] Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.147127 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sqcv6"] Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.169183 4809 scope.go:117] "RemoveContainer" containerID="936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.204361 4809 scope.go:117] "RemoveContainer" containerID="1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf" Sep 30 02:15:40 crc kubenswrapper[4809]: E0930 02:15:40.204760 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf\": container with ID starting with 1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf not found: ID does not exist" containerID="1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.204810 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf"} err="failed to get container status \"1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf\": rpc error: code = NotFound desc = could not find container \"1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf\": container with ID starting with 1e49e025f2bdb1b93a2340b33b962852e18e2ab2d2eb973bfd8965497bb1eabf not found: ID does not exist" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.204831 4809 scope.go:117] "RemoveContainer" containerID="f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45" Sep 30 02:15:40 crc kubenswrapper[4809]: E0930 02:15:40.205098 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45\": container with ID starting with f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45 not found: ID does not exist" containerID="f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.205122 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45"} err="failed to get container status \"f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45\": rpc error: code = NotFound desc = could not find container \"f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45\": container with ID starting with f6ea9c67e7b8b0f4912e55b22eb5c377c2914796e61137334ce3d680bf8bfd45 not found: ID does not exist" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.205168 4809 scope.go:117] "RemoveContainer" containerID="936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f" Sep 30 02:15:40 crc kubenswrapper[4809]: E0930 02:15:40.205438 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f\": container with ID starting with 936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f not found: ID does not exist" containerID="936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f" Sep 30 02:15:40 crc kubenswrapper[4809]: I0930 02:15:40.205487 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f"} err="failed to get container status \"936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f\": rpc error: code = NotFound desc = could not find container \"936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f\": container with ID starting with 936e3ce2b8ccc40264cd5edbf55e6a86a699140af76b0085a9872c1abc353a3f not found: ID does not exist" Sep 30 02:15:41 crc kubenswrapper[4809]: I0930 02:15:41.705898 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" path="/var/lib/kubelet/pods/d3e50761-de33-44d4-bbe4-fac4ee3be3cb/volumes" Sep 30 02:17:25 crc kubenswrapper[4809]: I0930 02:17:25.324598 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:17:25 crc kubenswrapper[4809]: I0930 02:17:25.325085 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:17:55 crc kubenswrapper[4809]: I0930 02:17:55.325445 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:17:55 crc kubenswrapper[4809]: I0930 02:17:55.327399 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:18:25 crc kubenswrapper[4809]: I0930 02:18:25.325516 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:18:25 crc kubenswrapper[4809]: I0930 02:18:25.326340 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:18:25 crc kubenswrapper[4809]: I0930 02:18:25.326525 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:18:25 crc kubenswrapper[4809]: I0930 02:18:25.328975 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:18:25 crc kubenswrapper[4809]: I0930 02:18:25.329355 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" gracePeriod=600 Sep 30 02:18:25 crc kubenswrapper[4809]: E0930 02:18:25.482400 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:18:26 crc kubenswrapper[4809]: I0930 02:18:26.089537 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" exitCode=0 Sep 30 02:18:26 crc kubenswrapper[4809]: I0930 02:18:26.089586 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7"} Sep 30 02:18:26 crc kubenswrapper[4809]: I0930 02:18:26.089639 4809 scope.go:117] "RemoveContainer" containerID="e91c8c4896ef8d900599ba6b9bdad60e9cdebe2df1b84beee0f23cdb7c39b382" Sep 30 02:18:26 crc kubenswrapper[4809]: I0930 02:18:26.090710 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:18:26 crc kubenswrapper[4809]: E0930 02:18:26.091386 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:18:39 crc kubenswrapper[4809]: I0930 02:18:39.706090 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:18:39 crc kubenswrapper[4809]: E0930 02:18:39.706914 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:18:52 crc 
kubenswrapper[4809]: I0930 02:18:52.690889 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:18:52 crc kubenswrapper[4809]: E0930 02:18:52.691617 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:19:06 crc kubenswrapper[4809]: I0930 02:19:06.692394 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:19:06 crc kubenswrapper[4809]: E0930 02:19:06.693747 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:19:19 crc kubenswrapper[4809]: I0930 02:19:19.698657 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:19:19 crc kubenswrapper[4809]: E0930 02:19:19.699475 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:19:31 crc kubenswrapper[4809]: I0930 02:19:31.691580 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:19:31 crc kubenswrapper[4809]: E0930 02:19:31.692582 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:19:44 crc kubenswrapper[4809]: I0930 02:19:44.691386 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:19:44 crc kubenswrapper[4809]: E0930 02:19:44.692075 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:19:57 crc kubenswrapper[4809]: I0930 02:19:57.691794 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:19:57 crc 
kubenswrapper[4809]: E0930 02:19:57.693054 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:20:08 crc kubenswrapper[4809]: I0930 02:20:08.690949 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:20:08 crc kubenswrapper[4809]: E0930 02:20:08.693462 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:20:23 crc kubenswrapper[4809]: I0930 02:20:23.691973 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:20:23 crc kubenswrapper[4809]: E0930 02:20:23.693396 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:20:38 crc kubenswrapper[4809]: I0930 02:20:38.690900 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:20:38 crc kubenswrapper[4809]: E0930 02:20:38.692274 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:20:50 crc kubenswrapper[4809]: I0930 02:20:50.691696 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:20:50 crc kubenswrapper[4809]: E0930 02:20:50.692612 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:21:04 crc kubenswrapper[4809]: I0930 02:21:04.691572 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:21:04 crc kubenswrapper[4809]: E0930 02:21:04.692326 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:21:16 crc kubenswrapper[4809]: I0930 02:21:16.690914 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:21:16 crc kubenswrapper[4809]: E0930 02:21:16.692108 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:21:29 crc kubenswrapper[4809]: I0930 02:21:29.712522 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:21:29 crc kubenswrapper[4809]: E0930 02:21:29.715577 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:21:44 crc kubenswrapper[4809]: I0930 02:21:44.691557 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:21:44 crc kubenswrapper[4809]: E0930 02:21:44.692736 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:21:57 crc kubenswrapper[4809]: I0930 02:21:57.691072 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:21:57 crc kubenswrapper[4809]: E0930 02:21:57.692433 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:22:11 crc kubenswrapper[4809]: I0930 02:22:11.692055 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:22:11 crc kubenswrapper[4809]: E0930 02:22:11.692889 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:22:23 crc kubenswrapper[4809]: I0930 02:22:23.691622 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:22:23 crc kubenswrapper[4809]: E0930 02:22:23.692700 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:22:35 crc kubenswrapper[4809]: I0930 02:22:35.691904 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:22:35 crc kubenswrapper[4809]: E0930 02:22:35.693218 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:22:46 crc kubenswrapper[4809]: I0930 02:22:46.691790 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:22:46 crc kubenswrapper[4809]: E0930 02:22:46.692715 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:22:57 crc kubenswrapper[4809]: I0930 02:22:57.692253 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:22:57 crc kubenswrapper[4809]: E0930 02:22:57.693246 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:23:08 crc kubenswrapper[4809]: I0930 02:23:08.691032 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:23:08 crc kubenswrapper[4809]: E0930 02:23:08.691780 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:23:09 crc kubenswrapper[4809]: E0930 02:23:09.719529 4809 
upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.24:49204->38.129.56.24:38341: write tcp 38.129.56.24:49204->38.129.56.24:38341: write: broken pipe Sep 30 02:23:23 crc kubenswrapper[4809]: I0930 02:23:23.691469 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:23:23 crc kubenswrapper[4809]: E0930 02:23:23.692301 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:23:36 crc kubenswrapper[4809]: I0930 02:23:36.691471 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:23:36 crc kubenswrapper[4809]: I0930 02:23:36.979263 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"36e332a7202cf82bf8991adf92263ecd4ddf4c08f831b8fd1bb95ae252eee0eb"} Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.836345 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-plfwn"] Sep 30 02:24:18 crc kubenswrapper[4809]: E0930 02:24:18.846450 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="extract-utilities" Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.846483 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="extract-utilities" Sep 30 02:24:18 crc kubenswrapper[4809]: E0930 02:24:18.846516 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="registry-server" Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.846527 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="registry-server" Sep 30 02:24:18 crc kubenswrapper[4809]: E0930 02:24:18.846570 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="extract-content" Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.846580 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="extract-content" Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.846980 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3e50761-de33-44d4-bbe4-fac4ee3be3cb" containerName="registry-server" Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.849486 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:18 crc kubenswrapper[4809]: I0930 02:24:18.853228 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-plfwn"] Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.019157 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79b9h\" (UniqueName: \"kubernetes.io/projected/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-kube-api-access-79b9h\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.019817 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-utilities\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.020208 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-catalog-content\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.122774 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-utilities\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.122938 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-catalog-content\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.123116 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79b9h\" (UniqueName: \"kubernetes.io/projected/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-kube-api-access-79b9h\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.123376 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-utilities\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.123686 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-catalog-content\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.145875 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-79b9h\" (UniqueName: \"kubernetes.io/projected/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-kube-api-access-79b9h\") pod \"certified-operators-plfwn\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.174741 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:19 crc kubenswrapper[4809]: I0930 02:24:19.751280 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-plfwn"] Sep 30 02:24:20 crc kubenswrapper[4809]: I0930 02:24:20.520979 4809 generic.go:334] "Generic (PLEG): container finished" podID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerID="49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985" exitCode=0 Sep 30 02:24:20 crc kubenswrapper[4809]: I0930 02:24:20.521165 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerDied","Data":"49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985"} Sep 30 02:24:20 crc kubenswrapper[4809]: I0930 02:24:20.521261 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerStarted","Data":"623a1f5ef6d71bc6b6247e212b3bc500b234994a442fc0a07a2cd96f7baa5dad"} Sep 30 02:24:20 crc kubenswrapper[4809]: I0930 02:24:20.524889 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 02:24:22 crc kubenswrapper[4809]: I0930 02:24:22.551406 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerStarted","Data":"b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be"} Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.178779 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zmn7h"] Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.182365 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.208918 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zmn7h"] Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.351441 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd7pd\" (UniqueName: \"kubernetes.io/projected/e1a88764-914e-409e-899e-aaf225fac4eb-kube-api-access-jd7pd\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.351629 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-catalog-content\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.351693 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-utilities\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.454108 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-catalog-content\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.454189 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-utilities\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.454263 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd7pd\" (UniqueName: \"kubernetes.io/projected/e1a88764-914e-409e-899e-aaf225fac4eb-kube-api-access-jd7pd\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.454625 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-catalog-content\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.454728 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-utilities\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.472796 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jd7pd\" (UniqueName: \"kubernetes.io/projected/e1a88764-914e-409e-899e-aaf225fac4eb-kube-api-access-jd7pd\") pod \"redhat-marketplace-zmn7h\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.523322 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.584966 4809 generic.go:334] "Generic (PLEG): container finished" podID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerID="b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be" exitCode=0 Sep 30 02:24:24 crc kubenswrapper[4809]: I0930 02:24:24.585024 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerDied","Data":"b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be"} Sep 30 02:24:25 crc kubenswrapper[4809]: W0930 02:24:25.075805 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1a88764_914e_409e_899e_aaf225fac4eb.slice/crio-c990fd7e9084a690203d5226e5d5f0fcd052cf4dc2f2bef46ae1967c67013ef6 WatchSource:0}: Error finding container c990fd7e9084a690203d5226e5d5f0fcd052cf4dc2f2bef46ae1967c67013ef6: Status 404 returned error can't find the container with id c990fd7e9084a690203d5226e5d5f0fcd052cf4dc2f2bef46ae1967c67013ef6 Sep 30 02:24:25 crc kubenswrapper[4809]: I0930 02:24:25.080570 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zmn7h"] Sep 30 02:24:25 crc kubenswrapper[4809]: E0930 02:24:25.632164 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1a88764_914e_409e_899e_aaf225fac4eb.slice/crio-7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1a88764_914e_409e_899e_aaf225fac4eb.slice/crio-conmon-7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184.scope\": RecentStats: unable to find data in memory cache]" Sep 30 02:24:25 crc kubenswrapper[4809]: I0930 02:24:25.639857 4809 generic.go:334] "Generic (PLEG): container finished" podID="e1a88764-914e-409e-899e-aaf225fac4eb" containerID="7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184" exitCode=0 Sep 30 02:24:25 crc kubenswrapper[4809]: I0930 02:24:25.639963 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerDied","Data":"7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184"} Sep 30 02:24:25 crc kubenswrapper[4809]: I0930 02:24:25.639997 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerStarted","Data":"c990fd7e9084a690203d5226e5d5f0fcd052cf4dc2f2bef46ae1967c67013ef6"} Sep 30 02:24:25 crc kubenswrapper[4809]: I0930 02:24:25.665902 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" 
event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerStarted","Data":"350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8"} Sep 30 02:24:25 crc kubenswrapper[4809]: I0930 02:24:25.735000 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-plfwn" podStartSLOduration=3.002082949 podStartE2EDuration="7.734974333s" podCreationTimestamp="2025-09-30 02:24:18 +0000 UTC" firstStartedPulling="2025-09-30 02:24:20.5244433 +0000 UTC m=+8111.560692728" lastFinishedPulling="2025-09-30 02:24:25.257334704 +0000 UTC m=+8116.293584112" observedRunningTime="2025-09-30 02:24:25.70996446 +0000 UTC m=+8116.746213868" watchObservedRunningTime="2025-09-30 02:24:25.734974333 +0000 UTC m=+8116.771223741" Sep 30 02:24:26 crc kubenswrapper[4809]: I0930 02:24:26.701439 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerStarted","Data":"f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a"} Sep 30 02:24:27 crc kubenswrapper[4809]: I0930 02:24:27.723473 4809 generic.go:334] "Generic (PLEG): container finished" podID="e1a88764-914e-409e-899e-aaf225fac4eb" containerID="f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a" exitCode=0 Sep 30 02:24:27 crc kubenswrapper[4809]: I0930 02:24:27.723584 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerDied","Data":"f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a"} Sep 30 02:24:28 crc kubenswrapper[4809]: I0930 02:24:28.737556 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerStarted","Data":"b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f"} Sep 30 02:24:28 crc kubenswrapper[4809]: I0930 02:24:28.761884 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zmn7h" podStartSLOduration=2.254844917 podStartE2EDuration="4.761862381s" podCreationTimestamp="2025-09-30 02:24:24 +0000 UTC" firstStartedPulling="2025-09-30 02:24:25.646846418 +0000 UTC m=+8116.683095826" lastFinishedPulling="2025-09-30 02:24:28.153863842 +0000 UTC m=+8119.190113290" observedRunningTime="2025-09-30 02:24:28.754262413 +0000 UTC m=+8119.790511831" watchObservedRunningTime="2025-09-30 02:24:28.761862381 +0000 UTC m=+8119.798111789" Sep 30 02:24:29 crc kubenswrapper[4809]: I0930 02:24:29.175684 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:29 crc kubenswrapper[4809]: I0930 02:24:29.175743 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:29 crc kubenswrapper[4809]: I0930 02:24:29.242347 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:34 crc kubenswrapper[4809]: I0930 02:24:34.524418 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:34 crc kubenswrapper[4809]: I0930 02:24:34.525097 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:34 crc kubenswrapper[4809]: I0930 02:24:34.585251 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:34 crc kubenswrapper[4809]: I0930 02:24:34.870002 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:34 crc kubenswrapper[4809]: I0930 02:24:34.924293 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zmn7h"] Sep 30 02:24:36 crc kubenswrapper[4809]: I0930 02:24:36.844994 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zmn7h" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="registry-server" containerID="cri-o://b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f" gracePeriod=2 Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.402070 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.415866 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-catalog-content\") pod \"e1a88764-914e-409e-899e-aaf225fac4eb\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.415978 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-utilities\") pod \"e1a88764-914e-409e-899e-aaf225fac4eb\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.416097 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd7pd\" (UniqueName: \"kubernetes.io/projected/e1a88764-914e-409e-899e-aaf225fac4eb-kube-api-access-jd7pd\") pod \"e1a88764-914e-409e-899e-aaf225fac4eb\" (UID: \"e1a88764-914e-409e-899e-aaf225fac4eb\") " Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.419303 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-utilities" (OuterVolumeSpecName: "utilities") pod "e1a88764-914e-409e-899e-aaf225fac4eb" (UID: "e1a88764-914e-409e-899e-aaf225fac4eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.424861 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1a88764-914e-409e-899e-aaf225fac4eb-kube-api-access-jd7pd" (OuterVolumeSpecName: "kube-api-access-jd7pd") pod "e1a88764-914e-409e-899e-aaf225fac4eb" (UID: "e1a88764-914e-409e-899e-aaf225fac4eb"). InnerVolumeSpecName "kube-api-access-jd7pd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.440019 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1a88764-914e-409e-899e-aaf225fac4eb" (UID: "e1a88764-914e-409e-899e-aaf225fac4eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.520000 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd7pd\" (UniqueName: \"kubernetes.io/projected/e1a88764-914e-409e-899e-aaf225fac4eb-kube-api-access-jd7pd\") on node \"crc\" DevicePath \"\"" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.520042 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.520054 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1a88764-914e-409e-899e-aaf225fac4eb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.858704 4809 generic.go:334] "Generic (PLEG): container finished" podID="e1a88764-914e-409e-899e-aaf225fac4eb" containerID="b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f" exitCode=0 Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.858760 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zmn7h" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.858813 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerDied","Data":"b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f"} Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.859797 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zmn7h" event={"ID":"e1a88764-914e-409e-899e-aaf225fac4eb","Type":"ContainerDied","Data":"c990fd7e9084a690203d5226e5d5f0fcd052cf4dc2f2bef46ae1967c67013ef6"} Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.859838 4809 scope.go:117] "RemoveContainer" containerID="b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.886565 4809 scope.go:117] "RemoveContainer" containerID="f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.887862 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zmn7h"] Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.899700 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zmn7h"] Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.912138 4809 scope.go:117] "RemoveContainer" containerID="7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.967824 4809 scope.go:117] "RemoveContainer" containerID="b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f" Sep 30 02:24:37 crc kubenswrapper[4809]: E0930 02:24:37.968456 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f\": container with ID starting with b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f not found: ID does not exist" containerID="b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.968516 4809 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f"} err="failed to get container status \"b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f\": rpc error: code = NotFound desc = could not find container \"b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f\": container with ID starting with b3e38d27886a48479aa4af0f399c1863797e3a2b5a1c30fd7fb679f396268b0f not found: ID does not exist" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.968549 4809 scope.go:117] "RemoveContainer" containerID="f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a" Sep 30 02:24:37 crc kubenswrapper[4809]: E0930 02:24:37.969123 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a\": container with ID starting with f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a not found: ID does not exist" containerID="f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.969186 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a"} err="failed to get container status \"f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a\": rpc error: code = NotFound desc = could not find container \"f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a\": container with ID starting with f8fe6d9653ab14011dd75df16f89594d4da02c2620f7a0ab3b03f22889c3470a not found: ID does not exist" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.969218 4809 scope.go:117] "RemoveContainer" containerID="7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184" Sep 30 02:24:37 crc kubenswrapper[4809]: E0930 02:24:37.969544 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184\": container with ID starting with 7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184 not found: ID does not exist" containerID="7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184" Sep 30 02:24:37 crc kubenswrapper[4809]: I0930 02:24:37.969572 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184"} err="failed to get container status \"7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184\": rpc error: code = NotFound desc = could not find container \"7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184\": container with ID starting with 7e832c3d2653b76b1cad02c7b95e6f297cb955591e8597660c5587d2b16e9184 not found: ID does not exist" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.242126 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.341201 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 02:24:39 crc kubenswrapper[4809]: E0930 02:24:39.342098 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="registry-server" Sep 
30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.342150 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="registry-server" Sep 30 02:24:39 crc kubenswrapper[4809]: E0930 02:24:39.342216 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="extract-utilities" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.342230 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="extract-utilities" Sep 30 02:24:39 crc kubenswrapper[4809]: E0930 02:24:39.342266 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="extract-content" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.342280 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="extract-content" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.342720 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" containerName="registry-server" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.344367 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.347179 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.348402 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.348858 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.350592 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gm56j" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.354621 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.360102 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.360295 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.360428 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-config-data\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.462704 4809 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m2qc\" (UniqueName: \"kubernetes.io/projected/015d3aad-2e8e-4491-bf81-88058e25fe55-kube-api-access-8m2qc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.462949 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463040 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463220 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463368 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463482 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463588 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463616 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.463758 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-config-data\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.465077 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.465581 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-config-data\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.473960 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.565505 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.565555 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.565983 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m2qc\" (UniqueName: \"kubernetes.io/projected/015d3aad-2e8e-4491-bf81-88058e25fe55-kube-api-access-8m2qc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.566095 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.566187 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.566291 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.566572 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: 
\"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.567829 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.568455 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.570458 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.574734 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.595038 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m2qc\" (UniqueName: \"kubernetes.io/projected/015d3aad-2e8e-4491-bf81-88058e25fe55-kube-api-access-8m2qc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.634723 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.682835 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 02:24:39 crc kubenswrapper[4809]: I0930 02:24:39.712962 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1a88764-914e-409e-899e-aaf225fac4eb" path="/var/lib/kubelet/pods/e1a88764-914e-409e-899e-aaf225fac4eb/volumes" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.223039 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 02:24:40 crc kubenswrapper[4809]: W0930 02:24:40.231168 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod015d3aad_2e8e_4491_bf81_88058e25fe55.slice/crio-66e13c9281a58b4601e7b368998459efbbc31e83b1f3fe97214f290e0330eca6 WatchSource:0}: Error finding container 66e13c9281a58b4601e7b368998459efbbc31e83b1f3fe97214f290e0330eca6: Status 404 returned error can't find the container with id 66e13c9281a58b4601e7b368998459efbbc31e83b1f3fe97214f290e0330eca6 Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.234171 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-plfwn"] Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.234416 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-plfwn" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="registry-server" containerID="cri-o://350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8" gracePeriod=2 Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.807403 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.899730 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-catalog-content\") pod \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.899838 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79b9h\" (UniqueName: \"kubernetes.io/projected/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-kube-api-access-79b9h\") pod \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.899928 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-utilities\") pod \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\" (UID: \"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1\") " Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.900998 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-utilities" (OuterVolumeSpecName: "utilities") pod "67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" (UID: "67e601ac-cfff-4e3e-bda8-7fde2bcc37f1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.901397 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.906570 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-kube-api-access-79b9h" (OuterVolumeSpecName: "kube-api-access-79b9h") pod "67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" (UID: "67e601ac-cfff-4e3e-bda8-7fde2bcc37f1"). InnerVolumeSpecName "kube-api-access-79b9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.935164 4809 generic.go:334] "Generic (PLEG): container finished" podID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerID="350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8" exitCode=0 Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.935231 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-plfwn" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.935268 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerDied","Data":"350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8"} Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.935555 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plfwn" event={"ID":"67e601ac-cfff-4e3e-bda8-7fde2bcc37f1","Type":"ContainerDied","Data":"623a1f5ef6d71bc6b6247e212b3bc500b234994a442fc0a07a2cd96f7baa5dad"} Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.935581 4809 scope.go:117] "RemoveContainer" containerID="350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.940818 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"015d3aad-2e8e-4491-bf81-88058e25fe55","Type":"ContainerStarted","Data":"66e13c9281a58b4601e7b368998459efbbc31e83b1f3fe97214f290e0330eca6"} Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.958624 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" (UID: "67e601ac-cfff-4e3e-bda8-7fde2bcc37f1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:24:40 crc kubenswrapper[4809]: I0930 02:24:40.973964 4809 scope.go:117] "RemoveContainer" containerID="b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.001199 4809 scope.go:117] "RemoveContainer" containerID="49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.003596 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.003654 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79b9h\" (UniqueName: \"kubernetes.io/projected/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1-kube-api-access-79b9h\") on node \"crc\" DevicePath \"\"" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.050562 4809 scope.go:117] "RemoveContainer" containerID="350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8" Sep 30 02:24:41 crc kubenswrapper[4809]: E0930 02:24:41.051069 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8\": container with ID starting with 350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8 not found: ID does not exist" containerID="350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.051117 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8"} err="failed to get container status \"350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8\": rpc error: code = NotFound desc = could not find container \"350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8\": container with ID starting with 350358b1dc708cc81cfa7d3ebabecd6fc21ed96785a8126d4c7a20dd6e4fe9f8 not found: ID does not exist" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.051153 4809 scope.go:117] "RemoveContainer" containerID="b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be" Sep 30 02:24:41 crc kubenswrapper[4809]: E0930 02:24:41.051614 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be\": container with ID starting with b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be not found: ID does not exist" containerID="b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.051675 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be"} err="failed to get container status \"b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be\": rpc error: code = NotFound desc = could not find container \"b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be\": container with ID starting with b6afc82383de17697a984dae557b8430e3c7f37ecaa45ce4c6848c7d0a4b78be not found: ID does not exist" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.051694 4809 scope.go:117] "RemoveContainer" 
containerID="49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985" Sep 30 02:24:41 crc kubenswrapper[4809]: E0930 02:24:41.051974 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985\": container with ID starting with 49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985 not found: ID does not exist" containerID="49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.052004 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985"} err="failed to get container status \"49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985\": rpc error: code = NotFound desc = could not find container \"49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985\": container with ID starting with 49cd15288fcdff734cbb2e3193fd9149fd6782b62b933528d4a6d6a40182a985 not found: ID does not exist" Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.275562 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-plfwn"] Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.286320 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-plfwn"] Sep 30 02:24:41 crc kubenswrapper[4809]: I0930 02:24:41.707561 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" path="/var/lib/kubelet/pods/67e601ac-cfff-4e3e-bda8-7fde2bcc37f1/volumes" Sep 30 02:25:08 crc kubenswrapper[4809]: E0930 02:25:08.563994 4809 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 30 02:25:08 crc kubenswrapper[4809]: E0930 02:25:08.565914 4809 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8m2qc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(015d3aad-2e8e-4491-bf81-88058e25fe55): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 02:25:08 crc kubenswrapper[4809]: E0930 02:25:08.567412 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="015d3aad-2e8e-4491-bf81-88058e25fe55" Sep 30 02:25:09 crc kubenswrapper[4809]: E0930 02:25:09.291605 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="015d3aad-2e8e-4491-bf81-88058e25fe55" Sep 30 02:25:22 crc kubenswrapper[4809]: I0930 02:25:22.146929 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 30 02:25:24 crc kubenswrapper[4809]: I0930 02:25:24.474145 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"015d3aad-2e8e-4491-bf81-88058e25fe55","Type":"ContainerStarted","Data":"7876c535c150bb0a1aafd15b74f991aa3c4927489ed4b0a7505d22589ec78e08"} Sep 30 02:25:24 crc kubenswrapper[4809]: I0930 02:25:24.507367 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.599319368 podStartE2EDuration="46.507343982s" podCreationTimestamp="2025-09-30 02:24:38 +0000 UTC" firstStartedPulling="2025-09-30 02:24:40.23392913 +0000 UTC m=+8131.270178568" lastFinishedPulling="2025-09-30 02:25:22.141953764 +0000 UTC m=+8173.178203182" observedRunningTime="2025-09-30 02:25:24.497960566 +0000 UTC m=+8175.534210004" watchObservedRunningTime="2025-09-30 02:25:24.507343982 +0000 UTC m=+8175.543593390" Sep 30 02:25:55 crc kubenswrapper[4809]: I0930 02:25:55.325065 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:25:55 crc kubenswrapper[4809]: I0930 02:25:55.325716 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:26:25 crc kubenswrapper[4809]: I0930 02:26:25.324990 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:26:25 crc kubenswrapper[4809]: I0930 02:26:25.327860 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.327987 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4tx29"] Sep 30 02:26:31 crc kubenswrapper[4809]: E0930 02:26:31.329064 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="extract-content" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.329078 4809 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="extract-content" Sep 30 02:26:31 crc kubenswrapper[4809]: E0930 02:26:31.329620 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="registry-server" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.329632 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="registry-server" Sep 30 02:26:31 crc kubenswrapper[4809]: E0930 02:26:31.329664 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="extract-utilities" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.329673 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="extract-utilities" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.329937 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e601ac-cfff-4e3e-bda8-7fde2bcc37f1" containerName="registry-server" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.332021 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.388962 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppw46\" (UniqueName: \"kubernetes.io/projected/529f5724-86da-4d8a-8f47-509c07f2799e-kube-api-access-ppw46\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.389040 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-utilities\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.389084 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-catalog-content\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.398999 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tx29"] Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.491079 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppw46\" (UniqueName: \"kubernetes.io/projected/529f5724-86da-4d8a-8f47-509c07f2799e-kube-api-access-ppw46\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.491143 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-utilities\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.491193 4809 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-catalog-content\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.492091 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-utilities\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.492541 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-catalog-content\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.524102 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppw46\" (UniqueName: \"kubernetes.io/projected/529f5724-86da-4d8a-8f47-509c07f2799e-kube-api-access-ppw46\") pod \"community-operators-4tx29\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:31 crc kubenswrapper[4809]: I0930 02:26:31.656529 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:32 crc kubenswrapper[4809]: I0930 02:26:32.855221 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tx29"] Sep 30 02:26:33 crc kubenswrapper[4809]: I0930 02:26:33.369579 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerDied","Data":"b57b070bdbb16a7830d7a97b813c46752cb2b2bd5ab7d79baf46dc10231bea20"} Sep 30 02:26:33 crc kubenswrapper[4809]: I0930 02:26:33.373957 4809 generic.go:334] "Generic (PLEG): container finished" podID="529f5724-86da-4d8a-8f47-509c07f2799e" containerID="b57b070bdbb16a7830d7a97b813c46752cb2b2bd5ab7d79baf46dc10231bea20" exitCode=0 Sep 30 02:26:33 crc kubenswrapper[4809]: I0930 02:26:33.374402 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerStarted","Data":"d5651ab35dc5e0f6fb7034b3cd623ad6c18bb6bf08fd1abdd45b604ae6c08d45"} Sep 30 02:26:35 crc kubenswrapper[4809]: I0930 02:26:35.396711 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerStarted","Data":"be34e0fb8864929aa1f0803c1080191696388752f32dee3f69a9c6cbf302e061"} Sep 30 02:26:36 crc kubenswrapper[4809]: I0930 02:26:36.408491 4809 generic.go:334] "Generic (PLEG): container finished" podID="529f5724-86da-4d8a-8f47-509c07f2799e" containerID="be34e0fb8864929aa1f0803c1080191696388752f32dee3f69a9c6cbf302e061" exitCode=0 Sep 30 02:26:36 crc kubenswrapper[4809]: I0930 02:26:36.408531 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" 
event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerDied","Data":"be34e0fb8864929aa1f0803c1080191696388752f32dee3f69a9c6cbf302e061"} Sep 30 02:26:37 crc kubenswrapper[4809]: I0930 02:26:37.435546 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerStarted","Data":"ec81534ccf462eecb164c73a652c8491244fd5440395a1f0557844169460dd40"} Sep 30 02:26:37 crc kubenswrapper[4809]: I0930 02:26:37.463425 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4tx29" podStartSLOduration=2.799615187 podStartE2EDuration="6.463400711s" podCreationTimestamp="2025-09-30 02:26:31 +0000 UTC" firstStartedPulling="2025-09-30 02:26:33.372779023 +0000 UTC m=+8244.409028431" lastFinishedPulling="2025-09-30 02:26:37.036564547 +0000 UTC m=+8248.072813955" observedRunningTime="2025-09-30 02:26:37.452258697 +0000 UTC m=+8248.488508105" watchObservedRunningTime="2025-09-30 02:26:37.463400711 +0000 UTC m=+8248.499650119" Sep 30 02:26:41 crc kubenswrapper[4809]: I0930 02:26:41.657268 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:41 crc kubenswrapper[4809]: I0930 02:26:41.657840 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:42 crc kubenswrapper[4809]: I0930 02:26:42.710909 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-4tx29" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="registry-server" probeResult="failure" output=< Sep 30 02:26:42 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:26:42 crc kubenswrapper[4809]: > Sep 30 02:26:51 crc kubenswrapper[4809]: I0930 02:26:51.753599 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:51 crc kubenswrapper[4809]: I0930 02:26:51.815786 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:52 crc kubenswrapper[4809]: I0930 02:26:52.005483 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tx29"] Sep 30 02:26:53 crc kubenswrapper[4809]: I0930 02:26:53.639585 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4tx29" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="registry-server" containerID="cri-o://ec81534ccf462eecb164c73a652c8491244fd5440395a1f0557844169460dd40" gracePeriod=2 Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.647076 4809 generic.go:334] "Generic (PLEG): container finished" podID="529f5724-86da-4d8a-8f47-509c07f2799e" containerID="ec81534ccf462eecb164c73a652c8491244fd5440395a1f0557844169460dd40" exitCode=0 Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.647178 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerDied","Data":"ec81534ccf462eecb164c73a652c8491244fd5440395a1f0557844169460dd40"} Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.841762 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.977567 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-catalog-content\") pod \"529f5724-86da-4d8a-8f47-509c07f2799e\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.977992 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppw46\" (UniqueName: \"kubernetes.io/projected/529f5724-86da-4d8a-8f47-509c07f2799e-kube-api-access-ppw46\") pod \"529f5724-86da-4d8a-8f47-509c07f2799e\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.978030 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-utilities\") pod \"529f5724-86da-4d8a-8f47-509c07f2799e\" (UID: \"529f5724-86da-4d8a-8f47-509c07f2799e\") " Sep 30 02:26:54 crc kubenswrapper[4809]: I0930 02:26:54.982395 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-utilities" (OuterVolumeSpecName: "utilities") pod "529f5724-86da-4d8a-8f47-509c07f2799e" (UID: "529f5724-86da-4d8a-8f47-509c07f2799e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.010879 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/529f5724-86da-4d8a-8f47-509c07f2799e-kube-api-access-ppw46" (OuterVolumeSpecName: "kube-api-access-ppw46") pod "529f5724-86da-4d8a-8f47-509c07f2799e" (UID: "529f5724-86da-4d8a-8f47-509c07f2799e"). InnerVolumeSpecName "kube-api-access-ppw46". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.082300 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppw46\" (UniqueName: \"kubernetes.io/projected/529f5724-86da-4d8a-8f47-509c07f2799e-kube-api-access-ppw46\") on node \"crc\" DevicePath \"\"" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.082359 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.128765 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "529f5724-86da-4d8a-8f47-509c07f2799e" (UID: "529f5724-86da-4d8a-8f47-509c07f2799e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.184484 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529f5724-86da-4d8a-8f47-509c07f2799e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.330367 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.330443 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.330493 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.331413 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"36e332a7202cf82bf8991adf92263ecd4ddf4c08f831b8fd1bb95ae252eee0eb"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.331489 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://36e332a7202cf82bf8991adf92263ecd4ddf4c08f831b8fd1bb95ae252eee0eb" gracePeriod=600 Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.659145 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="36e332a7202cf82bf8991adf92263ecd4ddf4c08f831b8fd1bb95ae252eee0eb" exitCode=0 Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.659187 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"36e332a7202cf82bf8991adf92263ecd4ddf4c08f831b8fd1bb95ae252eee0eb"} Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.661486 4809 scope.go:117] "RemoveContainer" containerID="4e03f2fe21334584620a57638696f4950faba6fb737ff44ebefd2f88e99dccb7" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.662532 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tx29" event={"ID":"529f5724-86da-4d8a-8f47-509c07f2799e","Type":"ContainerDied","Data":"d5651ab35dc5e0f6fb7034b3cd623ad6c18bb6bf08fd1abdd45b604ae6c08d45"} Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.662589 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4tx29" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.713300 4809 scope.go:117] "RemoveContainer" containerID="ec81534ccf462eecb164c73a652c8491244fd5440395a1f0557844169460dd40" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.729922 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tx29"] Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.729960 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4tx29"] Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.745400 4809 scope.go:117] "RemoveContainer" containerID="be34e0fb8864929aa1f0803c1080191696388752f32dee3f69a9c6cbf302e061" Sep 30 02:26:55 crc kubenswrapper[4809]: I0930 02:26:55.809435 4809 scope.go:117] "RemoveContainer" containerID="b57b070bdbb16a7830d7a97b813c46752cb2b2bd5ab7d79baf46dc10231bea20" Sep 30 02:26:56 crc kubenswrapper[4809]: I0930 02:26:56.676334 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef"} Sep 30 02:26:57 crc kubenswrapper[4809]: I0930 02:26:57.704266 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" path="/var/lib/kubelet/pods/529f5724-86da-4d8a-8f47-509c07f2799e/volumes" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.148991 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m2vgz"] Sep 30 02:27:44 crc kubenswrapper[4809]: E0930 02:27:44.162330 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="extract-content" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.162373 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="extract-content" Sep 30 02:27:44 crc kubenswrapper[4809]: E0930 02:27:44.163121 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="extract-utilities" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.163142 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="extract-utilities" Sep 30 02:27:44 crc kubenswrapper[4809]: E0930 02:27:44.163181 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="registry-server" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.163190 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="registry-server" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.164808 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="529f5724-86da-4d8a-8f47-509c07f2799e" containerName="registry-server" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.180928 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.260573 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-utilities\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.260761 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzzw9\" (UniqueName: \"kubernetes.io/projected/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-kube-api-access-hzzw9\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.260831 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-catalog-content\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.362521 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-utilities\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.362614 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzzw9\" (UniqueName: \"kubernetes.io/projected/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-kube-api-access-hzzw9\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.362765 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-catalog-content\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.369323 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-utilities\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.370420 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-catalog-content\") pod \"redhat-operators-m2vgz\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.429310 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzzw9\" (UniqueName: \"kubernetes.io/projected/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-kube-api-access-hzzw9\") pod \"redhat-operators-m2vgz\" (UID: 
\"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.458479 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m2vgz"] Sep 30 02:27:44 crc kubenswrapper[4809]: I0930 02:27:44.600145 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:27:46 crc kubenswrapper[4809]: I0930 02:27:46.242467 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m2vgz"] Sep 30 02:27:47 crc kubenswrapper[4809]: I0930 02:27:47.192611 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerDied","Data":"4836e4db432f0270d32a41b5c5cbb195b51ba442a5b654b2a7e653f5000c4fbe"} Sep 30 02:27:47 crc kubenswrapper[4809]: I0930 02:27:47.192376 4809 generic.go:334] "Generic (PLEG): container finished" podID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerID="4836e4db432f0270d32a41b5c5cbb195b51ba442a5b654b2a7e653f5000c4fbe" exitCode=0 Sep 30 02:27:47 crc kubenswrapper[4809]: I0930 02:27:47.192964 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerStarted","Data":"009e233ca0ba9b05d7b9674ddcdaafc3d001987b85e502b2c6af0e1cca5dda63"} Sep 30 02:27:49 crc kubenswrapper[4809]: I0930 02:27:49.217589 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerStarted","Data":"e9ec541dd9eb8f0bb6ee0caf2f09077f989275b384e75eb2a8531f5dddc1f8bd"} Sep 30 02:27:54 crc kubenswrapper[4809]: I0930 02:27:54.292140 4809 generic.go:334] "Generic (PLEG): container finished" podID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerID="e9ec541dd9eb8f0bb6ee0caf2f09077f989275b384e75eb2a8531f5dddc1f8bd" exitCode=0 Sep 30 02:27:54 crc kubenswrapper[4809]: I0930 02:27:54.292234 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerDied","Data":"e9ec541dd9eb8f0bb6ee0caf2f09077f989275b384e75eb2a8531f5dddc1f8bd"} Sep 30 02:27:55 crc kubenswrapper[4809]: I0930 02:27:55.312276 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerStarted","Data":"99d85a3682007081ae50247e9c1a73998f62ddc0735266b22d70568a3bb456e6"} Sep 30 02:27:55 crc kubenswrapper[4809]: I0930 02:27:55.333636 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m2vgz" podStartSLOduration=4.718743257 podStartE2EDuration="12.331116163s" podCreationTimestamp="2025-09-30 02:27:43 +0000 UTC" firstStartedPulling="2025-09-30 02:27:47.19691437 +0000 UTC m=+8318.233163788" lastFinishedPulling="2025-09-30 02:27:54.809287286 +0000 UTC m=+8325.845536694" observedRunningTime="2025-09-30 02:27:55.33029338 +0000 UTC m=+8326.366542788" watchObservedRunningTime="2025-09-30 02:27:55.331116163 +0000 UTC m=+8326.367380081" Sep 30 02:28:04 crc kubenswrapper[4809]: I0930 02:28:04.602842 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:28:04 crc kubenswrapper[4809]: I0930 02:28:04.603479 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:28:05 crc kubenswrapper[4809]: I0930 02:28:05.676549 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m2vgz" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:28:05 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:28:05 crc kubenswrapper[4809]: > Sep 30 02:28:15 crc kubenswrapper[4809]: I0930 02:28:15.658958 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m2vgz" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:28:15 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:28:15 crc kubenswrapper[4809]: > Sep 30 02:28:25 crc kubenswrapper[4809]: I0930 02:28:25.661119 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m2vgz" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:28:25 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:28:25 crc kubenswrapper[4809]: > Sep 30 02:28:35 crc kubenswrapper[4809]: I0930 02:28:35.694081 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m2vgz" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:28:35 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:28:35 crc kubenswrapper[4809]: > Sep 30 02:28:45 crc kubenswrapper[4809]: I0930 02:28:45.680429 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m2vgz" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:28:45 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:28:45 crc kubenswrapper[4809]: > Sep 30 02:28:54 crc kubenswrapper[4809]: I0930 02:28:54.685208 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:28:54 crc kubenswrapper[4809]: I0930 02:28:54.761710 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:28:54 crc kubenswrapper[4809]: I0930 02:28:54.952959 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m2vgz"] Sep 30 02:28:55 crc kubenswrapper[4809]: I0930 02:28:55.324710 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:28:55 crc kubenswrapper[4809]: I0930 02:28:55.327167 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:28:55 crc kubenswrapper[4809]: I0930 02:28:55.936101 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m2vgz" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" containerID="cri-o://99d85a3682007081ae50247e9c1a73998f62ddc0735266b22d70568a3bb456e6" gracePeriod=2 Sep 30 02:28:56 crc kubenswrapper[4809]: I0930 02:28:56.944943 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerDied","Data":"99d85a3682007081ae50247e9c1a73998f62ddc0735266b22d70568a3bb456e6"} Sep 30 02:28:56 crc kubenswrapper[4809]: I0930 02:28:56.944904 4809 generic.go:334] "Generic (PLEG): container finished" podID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerID="99d85a3682007081ae50247e9c1a73998f62ddc0735266b22d70568a3bb456e6" exitCode=0 Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.481071 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.581108 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-utilities\") pod \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.581148 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzzw9\" (UniqueName: \"kubernetes.io/projected/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-kube-api-access-hzzw9\") pod \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.581434 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-catalog-content\") pod \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\" (UID: \"de6dc8d9-125c-4e66-a616-89d26b3a4ae1\") " Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.587732 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-utilities" (OuterVolumeSpecName: "utilities") pod "de6dc8d9-125c-4e66-a616-89d26b3a4ae1" (UID: "de6dc8d9-125c-4e66-a616-89d26b3a4ae1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.626796 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-kube-api-access-hzzw9" (OuterVolumeSpecName: "kube-api-access-hzzw9") pod "de6dc8d9-125c-4e66-a616-89d26b3a4ae1" (UID: "de6dc8d9-125c-4e66-a616-89d26b3a4ae1"). InnerVolumeSpecName "kube-api-access-hzzw9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.685363 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.685401 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzzw9\" (UniqueName: \"kubernetes.io/projected/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-kube-api-access-hzzw9\") on node \"crc\" DevicePath \"\"" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.734325 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de6dc8d9-125c-4e66-a616-89d26b3a4ae1" (UID: "de6dc8d9-125c-4e66-a616-89d26b3a4ae1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.787866 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de6dc8d9-125c-4e66-a616-89d26b3a4ae1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.958587 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vgz" event={"ID":"de6dc8d9-125c-4e66-a616-89d26b3a4ae1","Type":"ContainerDied","Data":"009e233ca0ba9b05d7b9674ddcdaafc3d001987b85e502b2c6af0e1cca5dda63"} Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.958785 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vgz" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.961577 4809 scope.go:117] "RemoveContainer" containerID="99d85a3682007081ae50247e9c1a73998f62ddc0735266b22d70568a3bb456e6" Sep 30 02:28:57 crc kubenswrapper[4809]: I0930 02:28:57.999049 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m2vgz"] Sep 30 02:28:58 crc kubenswrapper[4809]: I0930 02:28:58.009859 4809 scope.go:117] "RemoveContainer" containerID="e9ec541dd9eb8f0bb6ee0caf2f09077f989275b384e75eb2a8531f5dddc1f8bd" Sep 30 02:28:58 crc kubenswrapper[4809]: I0930 02:28:58.014795 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m2vgz"] Sep 30 02:28:58 crc kubenswrapper[4809]: I0930 02:28:58.039079 4809 scope.go:117] "RemoveContainer" containerID="4836e4db432f0270d32a41b5c5cbb195b51ba442a5b654b2a7e653f5000c4fbe" Sep 30 02:28:59 crc kubenswrapper[4809]: I0930 02:28:59.710068 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" path="/var/lib/kubelet/pods/de6dc8d9-125c-4e66-a616-89d26b3a4ae1/volumes" Sep 30 02:29:25 crc kubenswrapper[4809]: I0930 02:29:25.325381 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:29:25 crc kubenswrapper[4809]: I0930 02:29:25.325956 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.324879 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.325937 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.326007 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.326659 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.326716 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" gracePeriod=600 Sep 30 02:29:55 crc kubenswrapper[4809]: E0930 02:29:55.479746 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.711512 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" exitCode=0 Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.712239 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef"} Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.712318 4809 scope.go:117] "RemoveContainer" containerID="36e332a7202cf82bf8991adf92263ecd4ddf4c08f831b8fd1bb95ae252eee0eb" Sep 30 02:29:55 crc kubenswrapper[4809]: I0930 02:29:55.713178 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:29:55 crc kubenswrapper[4809]: E0930 02:29:55.713517 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.266594 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m"] Sep 30 02:30:00 crc kubenswrapper[4809]: E0930 02:30:00.271819 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.271852 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" Sep 30 02:30:00 crc kubenswrapper[4809]: E0930 02:30:00.271909 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="extract-utilities" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.271918 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="extract-utilities" Sep 30 02:30:00 crc kubenswrapper[4809]: E0930 02:30:00.271938 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="extract-content" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.271947 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="extract-content" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.275655 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="de6dc8d9-125c-4e66-a616-89d26b3a4ae1" containerName="registry-server" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.282148 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.300017 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.303873 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.349207 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m"] Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.352190 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2eb72374-a92d-43c5-9d1b-c04d2edc6620-config-volume\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.352261 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmv7r\" (UniqueName: \"kubernetes.io/projected/2eb72374-a92d-43c5-9d1b-c04d2edc6620-kube-api-access-hmv7r\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.352321 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2eb72374-a92d-43c5-9d1b-c04d2edc6620-secret-volume\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.454525 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmv7r\" (UniqueName: \"kubernetes.io/projected/2eb72374-a92d-43c5-9d1b-c04d2edc6620-kube-api-access-hmv7r\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.454579 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2eb72374-a92d-43c5-9d1b-c04d2edc6620-secret-volume\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.454763 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2eb72374-a92d-43c5-9d1b-c04d2edc6620-config-volume\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.457922 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2eb72374-a92d-43c5-9d1b-c04d2edc6620-config-volume\") pod 
\"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.467342 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2eb72374-a92d-43c5-9d1b-c04d2edc6620-secret-volume\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.474870 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmv7r\" (UniqueName: \"kubernetes.io/projected/2eb72374-a92d-43c5-9d1b-c04d2edc6620-kube-api-access-hmv7r\") pod \"collect-profiles-29319990-ql49m\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:00 crc kubenswrapper[4809]: I0930 02:30:00.622049 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:01 crc kubenswrapper[4809]: I0930 02:30:01.474932 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m"] Sep 30 02:30:01 crc kubenswrapper[4809]: I0930 02:30:01.796200 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" event={"ID":"2eb72374-a92d-43c5-9d1b-c04d2edc6620","Type":"ContainerStarted","Data":"ab202224c5cc7ce138dfbf88a3302b843dadb1b0894ebe720b3f8cced8a79868"} Sep 30 02:30:01 crc kubenswrapper[4809]: I0930 02:30:01.796715 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" event={"ID":"2eb72374-a92d-43c5-9d1b-c04d2edc6620","Type":"ContainerStarted","Data":"8c2f2aa7c9b978432b277430f2c8b1071a13f3a6b6f60519cfc32f5f37bd2a95"} Sep 30 02:30:01 crc kubenswrapper[4809]: I0930 02:30:01.828688 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" podStartSLOduration=1.827063127 podStartE2EDuration="1.827063127s" podCreationTimestamp="2025-09-30 02:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 02:30:01.818217456 +0000 UTC m=+8452.854466884" watchObservedRunningTime="2025-09-30 02:30:01.827063127 +0000 UTC m=+8452.863312545" Sep 30 02:30:02 crc kubenswrapper[4809]: I0930 02:30:02.808632 4809 generic.go:334] "Generic (PLEG): container finished" podID="2eb72374-a92d-43c5-9d1b-c04d2edc6620" containerID="ab202224c5cc7ce138dfbf88a3302b843dadb1b0894ebe720b3f8cced8a79868" exitCode=0 Sep 30 02:30:02 crc kubenswrapper[4809]: I0930 02:30:02.808720 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" event={"ID":"2eb72374-a92d-43c5-9d1b-c04d2edc6620","Type":"ContainerDied","Data":"ab202224c5cc7ce138dfbf88a3302b843dadb1b0894ebe720b3f8cced8a79868"} Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.388949 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.448099 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmv7r\" (UniqueName: \"kubernetes.io/projected/2eb72374-a92d-43c5-9d1b-c04d2edc6620-kube-api-access-hmv7r\") pod \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.448222 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2eb72374-a92d-43c5-9d1b-c04d2edc6620-config-volume\") pod \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.448494 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2eb72374-a92d-43c5-9d1b-c04d2edc6620-secret-volume\") pod \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\" (UID: \"2eb72374-a92d-43c5-9d1b-c04d2edc6620\") " Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.449096 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eb72374-a92d-43c5-9d1b-c04d2edc6620-config-volume" (OuterVolumeSpecName: "config-volume") pod "2eb72374-a92d-43c5-9d1b-c04d2edc6620" (UID: "2eb72374-a92d-43c5-9d1b-c04d2edc6620"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.455832 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eb72374-a92d-43c5-9d1b-c04d2edc6620-kube-api-access-hmv7r" (OuterVolumeSpecName: "kube-api-access-hmv7r") pod "2eb72374-a92d-43c5-9d1b-c04d2edc6620" (UID: "2eb72374-a92d-43c5-9d1b-c04d2edc6620"). InnerVolumeSpecName "kube-api-access-hmv7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.456552 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eb72374-a92d-43c5-9d1b-c04d2edc6620-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2eb72374-a92d-43c5-9d1b-c04d2edc6620" (UID: "2eb72374-a92d-43c5-9d1b-c04d2edc6620"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.551488 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2eb72374-a92d-43c5-9d1b-c04d2edc6620-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.552571 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2eb72374-a92d-43c5-9d1b-c04d2edc6620-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.552587 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmv7r\" (UniqueName: \"kubernetes.io/projected/2eb72374-a92d-43c5-9d1b-c04d2edc6620-kube-api-access-hmv7r\") on node \"crc\" DevicePath \"\"" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.556261 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w"] Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.569484 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319945-bnw6w"] Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.833711 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" event={"ID":"2eb72374-a92d-43c5-9d1b-c04d2edc6620","Type":"ContainerDied","Data":"8c2f2aa7c9b978432b277430f2c8b1071a13f3a6b6f60519cfc32f5f37bd2a95"} Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.833989 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c2f2aa7c9b978432b277430f2c8b1071a13f3a6b6f60519cfc32f5f37bd2a95" Sep 30 02:30:04 crc kubenswrapper[4809]: I0930 02:30:04.833764 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319990-ql49m" Sep 30 02:30:05 crc kubenswrapper[4809]: I0930 02:30:05.710850 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="043d1b95-3e33-4acf-a8d9-b17644c28f28" path="/var/lib/kubelet/pods/043d1b95-3e33-4acf-a8d9-b17644c28f28/volumes" Sep 30 02:30:08 crc kubenswrapper[4809]: I0930 02:30:08.692253 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:30:08 crc kubenswrapper[4809]: E0930 02:30:08.693832 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:30:20 crc kubenswrapper[4809]: I0930 02:30:20.691109 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:30:20 crc kubenswrapper[4809]: E0930 02:30:20.691996 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:30:21 crc kubenswrapper[4809]: I0930 02:30:21.602670 4809 scope.go:117] "RemoveContainer" containerID="bd76e70599e1e48cde7aee2562646ff60bbf210c9450794040e719e37a2e20b7" Sep 30 02:30:35 crc kubenswrapper[4809]: I0930 02:30:35.690705 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:30:35 crc kubenswrapper[4809]: E0930 02:30:35.691582 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:30:49 crc kubenswrapper[4809]: I0930 02:30:49.708683 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:30:49 crc kubenswrapper[4809]: E0930 02:30:49.709730 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:31:00 crc kubenswrapper[4809]: I0930 02:31:00.691624 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:31:00 crc kubenswrapper[4809]: E0930 02:31:00.692681 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:31:14 crc kubenswrapper[4809]: I0930 02:31:14.691291 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:31:14 crc kubenswrapper[4809]: E0930 02:31:14.692211 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:31:27 crc kubenswrapper[4809]: I0930 02:31:27.690948 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:31:27 crc kubenswrapper[4809]: E0930 02:31:27.691749 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:31:39 crc kubenswrapper[4809]: I0930 02:31:39.691252 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:31:39 crc kubenswrapper[4809]: E0930 02:31:39.691989 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:31:52 crc kubenswrapper[4809]: I0930 02:31:52.691171 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:31:52 crc kubenswrapper[4809]: E0930 02:31:52.691925 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:32:04 crc kubenswrapper[4809]: I0930 02:32:04.692943 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:32:04 crc kubenswrapper[4809]: E0930 02:32:04.695089 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:32:19 crc kubenswrapper[4809]: I0930 02:32:19.702173 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:32:19 crc kubenswrapper[4809]: E0930 02:32:19.702933 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:32:32 crc kubenswrapper[4809]: I0930 02:32:32.691709 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:32:32 crc kubenswrapper[4809]: E0930 02:32:32.692505 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:32:43 crc kubenswrapper[4809]: I0930 02:32:43.692091 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:32:43 crc kubenswrapper[4809]: E0930 02:32:43.693102 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:32:54 crc kubenswrapper[4809]: I0930 02:32:54.693152 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:32:54 crc kubenswrapper[4809]: E0930 02:32:54.694212 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:33:05 crc kubenswrapper[4809]: I0930 02:33:05.692232 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:33:05 crc kubenswrapper[4809]: E0930 02:33:05.693303 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:33:18 crc kubenswrapper[4809]: I0930 02:33:18.691660 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:33:18 crc kubenswrapper[4809]: E0930 02:33:18.692576 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:33:32 crc kubenswrapper[4809]: I0930 02:33:32.692116 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:33:32 crc kubenswrapper[4809]: E0930 02:33:32.693621 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:33:46 crc kubenswrapper[4809]: I0930 02:33:46.690831 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:33:46 crc kubenswrapper[4809]: E0930 02:33:46.691920 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:34:00 crc kubenswrapper[4809]: I0930 02:34:00.691913 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:34:00 crc kubenswrapper[4809]: E0930 02:34:00.693454 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:34:15 crc kubenswrapper[4809]: I0930 02:34:15.692132 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:34:15 crc kubenswrapper[4809]: E0930 02:34:15.693211 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:34:29 crc kubenswrapper[4809]: I0930 02:34:29.711952 4809 scope.go:117] "RemoveContainer" 
containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:34:29 crc kubenswrapper[4809]: E0930 02:34:29.712978 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:34:40 crc kubenswrapper[4809]: I0930 02:34:40.692401 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:34:40 crc kubenswrapper[4809]: E0930 02:34:40.693579 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.785461 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-js6zz"] Sep 30 02:34:54 crc kubenswrapper[4809]: E0930 02:34:54.791509 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eb72374-a92d-43c5-9d1b-c04d2edc6620" containerName="collect-profiles" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.791559 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eb72374-a92d-43c5-9d1b-c04d2edc6620" containerName="collect-profiles" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.793342 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eb72374-a92d-43c5-9d1b-c04d2edc6620" containerName="collect-profiles" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.801310 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.807881 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-js6zz"] Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.941689 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztppv\" (UniqueName: \"kubernetes.io/projected/a190f550-57c8-4651-9053-5f5a0526d635-kube-api-access-ztppv\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.941756 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-catalog-content\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:54 crc kubenswrapper[4809]: I0930 02:34:54.941863 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-utilities\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.043738 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-utilities\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.043974 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztppv\" (UniqueName: \"kubernetes.io/projected/a190f550-57c8-4651-9053-5f5a0526d635-kube-api-access-ztppv\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.044467 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-catalog-content\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.048548 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-utilities\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.048936 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-catalog-content\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.094768 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ztppv\" (UniqueName: \"kubernetes.io/projected/a190f550-57c8-4651-9053-5f5a0526d635-kube-api-access-ztppv\") pod \"certified-operators-js6zz\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.152160 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:34:55 crc kubenswrapper[4809]: I0930 02:34:55.692320 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:34:56 crc kubenswrapper[4809]: I0930 02:34:56.103968 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-js6zz"] Sep 30 02:34:56 crc kubenswrapper[4809]: I0930 02:34:56.639044 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerDied","Data":"2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc"} Sep 30 02:34:56 crc kubenswrapper[4809]: I0930 02:34:56.639718 4809 generic.go:334] "Generic (PLEG): container finished" podID="a190f550-57c8-4651-9053-5f5a0526d635" containerID="2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc" exitCode=0 Sep 30 02:34:56 crc kubenswrapper[4809]: I0930 02:34:56.640378 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerStarted","Data":"5416be6d1a7304c876178808e6f47d56b6d79fbe837135ddfad5f90399c47c7d"} Sep 30 02:34:56 crc kubenswrapper[4809]: I0930 02:34:56.643879 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"2bbc1b5ae2d7f94976345d37596f7094e7e2dbab97fff80bacb51904b6819d54"} Sep 30 02:34:56 crc kubenswrapper[4809]: I0930 02:34:56.647264 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 02:34:58 crc kubenswrapper[4809]: I0930 02:34:58.691604 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerStarted","Data":"0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03"} Sep 30 02:35:03 crc kubenswrapper[4809]: I0930 02:35:03.808724 4809 generic.go:334] "Generic (PLEG): container finished" podID="a190f550-57c8-4651-9053-5f5a0526d635" containerID="0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03" exitCode=0 Sep 30 02:35:03 crc kubenswrapper[4809]: I0930 02:35:03.808791 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerDied","Data":"0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03"} Sep 30 02:35:04 crc kubenswrapper[4809]: I0930 02:35:04.822786 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerStarted","Data":"bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7"} Sep 30 02:35:04 crc kubenswrapper[4809]: I0930 02:35:04.851069 4809 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-js6zz" podStartSLOduration=3.041618313 podStartE2EDuration="10.850385752s" podCreationTimestamp="2025-09-30 02:34:54 +0000 UTC" firstStartedPulling="2025-09-30 02:34:56.64194536 +0000 UTC m=+8747.678194798" lastFinishedPulling="2025-09-30 02:35:04.450712829 +0000 UTC m=+8755.486962237" observedRunningTime="2025-09-30 02:35:04.849750135 +0000 UTC m=+8755.885999553" watchObservedRunningTime="2025-09-30 02:35:04.850385752 +0000 UTC m=+8755.886635160" Sep 30 02:35:05 crc kubenswrapper[4809]: I0930 02:35:05.153816 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:35:05 crc kubenswrapper[4809]: I0930 02:35:05.154018 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:35:06 crc kubenswrapper[4809]: I0930 02:35:06.208125 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-js6zz" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="registry-server" probeResult="failure" output=< Sep 30 02:35:06 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:35:06 crc kubenswrapper[4809]: > Sep 30 02:35:15 crc kubenswrapper[4809]: I0930 02:35:15.244821 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:35:15 crc kubenswrapper[4809]: I0930 02:35:15.326729 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:35:15 crc kubenswrapper[4809]: I0930 02:35:15.524520 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-js6zz"] Sep 30 02:35:16 crc kubenswrapper[4809]: I0930 02:35:16.979452 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-js6zz" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="registry-server" containerID="cri-o://bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7" gracePeriod=2 Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.848306 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.953412 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-catalog-content\") pod \"a190f550-57c8-4651-9053-5f5a0526d635\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.953836 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztppv\" (UniqueName: \"kubernetes.io/projected/a190f550-57c8-4651-9053-5f5a0526d635-kube-api-access-ztppv\") pod \"a190f550-57c8-4651-9053-5f5a0526d635\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.953897 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-utilities\") pod \"a190f550-57c8-4651-9053-5f5a0526d635\" (UID: \"a190f550-57c8-4651-9053-5f5a0526d635\") " Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.959002 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-utilities" (OuterVolumeSpecName: "utilities") pod "a190f550-57c8-4651-9053-5f5a0526d635" (UID: "a190f550-57c8-4651-9053-5f5a0526d635"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.966877 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a190f550-57c8-4651-9053-5f5a0526d635-kube-api-access-ztppv" (OuterVolumeSpecName: "kube-api-access-ztppv") pod "a190f550-57c8-4651-9053-5f5a0526d635" (UID: "a190f550-57c8-4651-9053-5f5a0526d635"). InnerVolumeSpecName "kube-api-access-ztppv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.993427 4809 generic.go:334] "Generic (PLEG): container finished" podID="a190f550-57c8-4651-9053-5f5a0526d635" containerID="bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7" exitCode=0 Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.993475 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerDied","Data":"bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7"} Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.993504 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-js6zz" event={"ID":"a190f550-57c8-4651-9053-5f5a0526d635","Type":"ContainerDied","Data":"5416be6d1a7304c876178808e6f47d56b6d79fbe837135ddfad5f90399c47c7d"} Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.993525 4809 scope.go:117] "RemoveContainer" containerID="bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7" Sep 30 02:35:17 crc kubenswrapper[4809]: I0930 02:35:17.993698 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-js6zz" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.035462 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a190f550-57c8-4651-9053-5f5a0526d635" (UID: "a190f550-57c8-4651-9053-5f5a0526d635"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.041791 4809 scope.go:117] "RemoveContainer" containerID="0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.057624 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.057666 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztppv\" (UniqueName: \"kubernetes.io/projected/a190f550-57c8-4651-9053-5f5a0526d635-kube-api-access-ztppv\") on node \"crc\" DevicePath \"\"" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.057676 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a190f550-57c8-4651-9053-5f5a0526d635-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.071630 4809 scope.go:117] "RemoveContainer" containerID="2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.120575 4809 scope.go:117] "RemoveContainer" containerID="bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7" Sep 30 02:35:18 crc kubenswrapper[4809]: E0930 02:35:18.124750 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7\": container with ID starting with bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7 not found: ID does not exist" containerID="bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.125525 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7"} err="failed to get container status \"bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7\": rpc error: code = NotFound desc = could not find container \"bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7\": container with ID starting with bfe687fe081b15938121dad07dec8e7b20eafc8660ee8a2d1f1c1420f8f875e7 not found: ID does not exist" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.125579 4809 scope.go:117] "RemoveContainer" containerID="0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03" Sep 30 02:35:18 crc kubenswrapper[4809]: E0930 02:35:18.126128 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03\": container with ID starting with 0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03 not found: ID does not exist" 
containerID="0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.126157 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03"} err="failed to get container status \"0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03\": rpc error: code = NotFound desc = could not find container \"0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03\": container with ID starting with 0ee04b95e1b73426c2df156aa3712b1e7cd32240737a43344c2bf0e704ff2c03 not found: ID does not exist" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.126177 4809 scope.go:117] "RemoveContainer" containerID="2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc" Sep 30 02:35:18 crc kubenswrapper[4809]: E0930 02:35:18.126589 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc\": container with ID starting with 2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc not found: ID does not exist" containerID="2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.126630 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc"} err="failed to get container status \"2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc\": rpc error: code = NotFound desc = could not find container \"2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc\": container with ID starting with 2d8de4a988ff27fcf40ed350c68f796d8eb3595eaf1f89f40a6737eb50d36cbc not found: ID does not exist" Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.359976 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-js6zz"] Sep 30 02:35:18 crc kubenswrapper[4809]: I0930 02:35:18.371661 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-js6zz"] Sep 30 02:35:19 crc kubenswrapper[4809]: I0930 02:35:19.712665 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a190f550-57c8-4651-9053-5f5a0526d635" path="/var/lib/kubelet/pods/a190f550-57c8-4651-9053-5f5a0526d635/volumes" Sep 30 02:36:55 crc kubenswrapper[4809]: I0930 02:36:55.324821 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:36:55 crc kubenswrapper[4809]: I0930 02:36:55.326984 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.138372 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g9mrs"] Sep 30 02:37:19 crc kubenswrapper[4809]: E0930 02:37:19.139245 4809 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="extract-utilities" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.139258 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="extract-utilities" Sep 30 02:37:19 crc kubenswrapper[4809]: E0930 02:37:19.139289 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="extract-content" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.139295 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="extract-content" Sep 30 02:37:19 crc kubenswrapper[4809]: E0930 02:37:19.139314 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="registry-server" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.139320 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="registry-server" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.139521 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="a190f550-57c8-4651-9053-5f5a0526d635" containerName="registry-server" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.142764 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.155256 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g9mrs"] Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.202570 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-utilities\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.202763 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-catalog-content\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.202911 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbbbn\" (UniqueName: \"kubernetes.io/projected/9492cc5c-9147-4e3e-934f-9f55c707d2ff-kube-api-access-cbbbn\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.304482 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-utilities\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.304656 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-catalog-content\") pod 
\"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.304773 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbbbn\" (UniqueName: \"kubernetes.io/projected/9492cc5c-9147-4e3e-934f-9f55c707d2ff-kube-api-access-cbbbn\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.305018 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-utilities\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.305092 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-catalog-content\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.327844 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbbbn\" (UniqueName: \"kubernetes.io/projected/9492cc5c-9147-4e3e-934f-9f55c707d2ff-kube-api-access-cbbbn\") pod \"community-operators-g9mrs\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:19 crc kubenswrapper[4809]: I0930 02:37:19.483577 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.092026 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g9mrs"] Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.501693 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerDied","Data":"f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7"} Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.501853 4809 generic.go:334] "Generic (PLEG): container finished" podID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerID="f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7" exitCode=0 Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.502160 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerStarted","Data":"0aa2ad988378ea3b35908490e52d22f6833d3c8ceb998c8b2a64c6d33ef56174"} Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.958677 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kndjl"] Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.961886 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:20 crc kubenswrapper[4809]: I0930 02:37:20.980128 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kndjl"] Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.043799 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-catalog-content\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.043919 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-utilities\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.043943 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzrkm\" (UniqueName: \"kubernetes.io/projected/4fdfd08c-764d-4b67-9258-6be93428da7a-kube-api-access-tzrkm\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.146300 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-catalog-content\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.146349 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-utilities\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.146371 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzrkm\" (UniqueName: \"kubernetes.io/projected/4fdfd08c-764d-4b67-9258-6be93428da7a-kube-api-access-tzrkm\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.147247 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-catalog-content\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.147782 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-utilities\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.171964 4809 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tzrkm\" (UniqueName: \"kubernetes.io/projected/4fdfd08c-764d-4b67-9258-6be93428da7a-kube-api-access-tzrkm\") pod \"redhat-marketplace-kndjl\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.288154 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:21 crc kubenswrapper[4809]: I0930 02:37:21.757108 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kndjl"] Sep 30 02:37:22 crc kubenswrapper[4809]: I0930 02:37:22.541343 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerStarted","Data":"dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e"} Sep 30 02:37:22 crc kubenswrapper[4809]: I0930 02:37:22.545387 4809 generic.go:334] "Generic (PLEG): container finished" podID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerID="2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5" exitCode=0 Sep 30 02:37:22 crc kubenswrapper[4809]: I0930 02:37:22.545477 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kndjl" event={"ID":"4fdfd08c-764d-4b67-9258-6be93428da7a","Type":"ContainerDied","Data":"2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5"} Sep 30 02:37:22 crc kubenswrapper[4809]: I0930 02:37:22.545524 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kndjl" event={"ID":"4fdfd08c-764d-4b67-9258-6be93428da7a","Type":"ContainerStarted","Data":"95ca0f4b1a83f322d4c0eff267b5f64c49efcb8436527f43bcc7012547f16327"} Sep 30 02:37:23 crc kubenswrapper[4809]: I0930 02:37:23.563880 4809 generic.go:334] "Generic (PLEG): container finished" podID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerID="dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e" exitCode=0 Sep 30 02:37:23 crc kubenswrapper[4809]: I0930 02:37:23.564033 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerDied","Data":"dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e"} Sep 30 02:37:24 crc kubenswrapper[4809]: I0930 02:37:24.579857 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerStarted","Data":"eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3"} Sep 30 02:37:24 crc kubenswrapper[4809]: I0930 02:37:24.582560 4809 generic.go:334] "Generic (PLEG): container finished" podID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerID="b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96" exitCode=0 Sep 30 02:37:24 crc kubenswrapper[4809]: I0930 02:37:24.582639 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kndjl" event={"ID":"4fdfd08c-764d-4b67-9258-6be93428da7a","Type":"ContainerDied","Data":"b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96"} Sep 30 02:37:24 crc kubenswrapper[4809]: I0930 02:37:24.617134 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g9mrs" 
podStartSLOduration=2.01407649 podStartE2EDuration="5.617098427s" podCreationTimestamp="2025-09-30 02:37:19 +0000 UTC" firstStartedPulling="2025-09-30 02:37:20.504858956 +0000 UTC m=+8891.541108374" lastFinishedPulling="2025-09-30 02:37:24.107880903 +0000 UTC m=+8895.144130311" observedRunningTime="2025-09-30 02:37:24.601727626 +0000 UTC m=+8895.637977054" watchObservedRunningTime="2025-09-30 02:37:24.617098427 +0000 UTC m=+8895.653347885" Sep 30 02:37:25 crc kubenswrapper[4809]: I0930 02:37:25.325197 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:37:25 crc kubenswrapper[4809]: I0930 02:37:25.325594 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:37:25 crc kubenswrapper[4809]: I0930 02:37:25.594039 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kndjl" event={"ID":"4fdfd08c-764d-4b67-9258-6be93428da7a","Type":"ContainerStarted","Data":"737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214"} Sep 30 02:37:25 crc kubenswrapper[4809]: I0930 02:37:25.620575 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kndjl" podStartSLOduration=3.091621896 podStartE2EDuration="5.620531884s" podCreationTimestamp="2025-09-30 02:37:20 +0000 UTC" firstStartedPulling="2025-09-30 02:37:22.549446732 +0000 UTC m=+8893.585696150" lastFinishedPulling="2025-09-30 02:37:25.07835673 +0000 UTC m=+8896.114606138" observedRunningTime="2025-09-30 02:37:25.613047979 +0000 UTC m=+8896.649297407" watchObservedRunningTime="2025-09-30 02:37:25.620531884 +0000 UTC m=+8896.656781292" Sep 30 02:37:29 crc kubenswrapper[4809]: I0930 02:37:29.484786 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:29 crc kubenswrapper[4809]: I0930 02:37:29.485506 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:30 crc kubenswrapper[4809]: I0930 02:37:30.552250 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-g9mrs" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="registry-server" probeResult="failure" output=< Sep 30 02:37:30 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:37:30 crc kubenswrapper[4809]: > Sep 30 02:37:31 crc kubenswrapper[4809]: I0930 02:37:31.288777 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:31 crc kubenswrapper[4809]: I0930 02:37:31.288832 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:31 crc kubenswrapper[4809]: I0930 02:37:31.352060 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:31 crc kubenswrapper[4809]: 
I0930 02:37:31.759493 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:32 crc kubenswrapper[4809]: I0930 02:37:32.745511 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kndjl"] Sep 30 02:37:33 crc kubenswrapper[4809]: I0930 02:37:33.695582 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kndjl" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="registry-server" containerID="cri-o://737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214" gracePeriod=2 Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.242638 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.371138 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-catalog-content\") pod \"4fdfd08c-764d-4b67-9258-6be93428da7a\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.371381 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzrkm\" (UniqueName: \"kubernetes.io/projected/4fdfd08c-764d-4b67-9258-6be93428da7a-kube-api-access-tzrkm\") pod \"4fdfd08c-764d-4b67-9258-6be93428da7a\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.371424 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-utilities\") pod \"4fdfd08c-764d-4b67-9258-6be93428da7a\" (UID: \"4fdfd08c-764d-4b67-9258-6be93428da7a\") " Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.372261 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-utilities" (OuterVolumeSpecName: "utilities") pod "4fdfd08c-764d-4b67-9258-6be93428da7a" (UID: "4fdfd08c-764d-4b67-9258-6be93428da7a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.379165 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fdfd08c-764d-4b67-9258-6be93428da7a-kube-api-access-tzrkm" (OuterVolumeSpecName: "kube-api-access-tzrkm") pod "4fdfd08c-764d-4b67-9258-6be93428da7a" (UID: "4fdfd08c-764d-4b67-9258-6be93428da7a"). InnerVolumeSpecName "kube-api-access-tzrkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.389893 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fdfd08c-764d-4b67-9258-6be93428da7a" (UID: "4fdfd08c-764d-4b67-9258-6be93428da7a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.474232 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzrkm\" (UniqueName: \"kubernetes.io/projected/4fdfd08c-764d-4b67-9258-6be93428da7a-kube-api-access-tzrkm\") on node \"crc\" DevicePath \"\"" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.474264 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.474275 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fdfd08c-764d-4b67-9258-6be93428da7a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.715969 4809 generic.go:334] "Generic (PLEG): container finished" podID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerID="737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214" exitCode=0 Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.716036 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kndjl" event={"ID":"4fdfd08c-764d-4b67-9258-6be93428da7a","Type":"ContainerDied","Data":"737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214"} Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.716093 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kndjl" event={"ID":"4fdfd08c-764d-4b67-9258-6be93428da7a","Type":"ContainerDied","Data":"95ca0f4b1a83f322d4c0eff267b5f64c49efcb8436527f43bcc7012547f16327"} Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.716098 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kndjl" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.716128 4809 scope.go:117] "RemoveContainer" containerID="737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.778794 4809 scope.go:117] "RemoveContainer" containerID="b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.779914 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kndjl"] Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.790949 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kndjl"] Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.817084 4809 scope.go:117] "RemoveContainer" containerID="2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.899909 4809 scope.go:117] "RemoveContainer" containerID="737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214" Sep 30 02:37:34 crc kubenswrapper[4809]: E0930 02:37:34.900978 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214\": container with ID starting with 737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214 not found: ID does not exist" containerID="737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.901030 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214"} err="failed to get container status \"737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214\": rpc error: code = NotFound desc = could not find container \"737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214\": container with ID starting with 737bfa8bc2cc191fef7a87686cc17b26be6cd0c8ec58c1b1901756bcece82214 not found: ID does not exist" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.901049 4809 scope.go:117] "RemoveContainer" containerID="b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96" Sep 30 02:37:34 crc kubenswrapper[4809]: E0930 02:37:34.901473 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96\": container with ID starting with b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96 not found: ID does not exist" containerID="b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.901535 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96"} err="failed to get container status \"b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96\": rpc error: code = NotFound desc = could not find container \"b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96\": container with ID starting with b318f9f777e01c2bb9f14383caef7cfc504bdae291f1ccd5bacf20efcfcdab96 not found: ID does not exist" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.901554 4809 scope.go:117] "RemoveContainer" 
containerID="2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5" Sep 30 02:37:34 crc kubenswrapper[4809]: E0930 02:37:34.902258 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5\": container with ID starting with 2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5 not found: ID does not exist" containerID="2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5" Sep 30 02:37:34 crc kubenswrapper[4809]: I0930 02:37:34.902283 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5"} err="failed to get container status \"2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5\": rpc error: code = NotFound desc = could not find container \"2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5\": container with ID starting with 2e19640206710170cb4e25d15e266f6ca3545fe39e1f7a8534d1353008aa69a5 not found: ID does not exist" Sep 30 02:37:35 crc kubenswrapper[4809]: I0930 02:37:35.709899 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" path="/var/lib/kubelet/pods/4fdfd08c-764d-4b67-9258-6be93428da7a/volumes" Sep 30 02:37:39 crc kubenswrapper[4809]: I0930 02:37:39.556477 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:39 crc kubenswrapper[4809]: I0930 02:37:39.637384 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:39 crc kubenswrapper[4809]: I0930 02:37:39.796743 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g9mrs"] Sep 30 02:37:40 crc kubenswrapper[4809]: I0930 02:37:40.833135 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g9mrs" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="registry-server" containerID="cri-o://eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3" gracePeriod=2 Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.441051 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.491761 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-catalog-content\") pod \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.491839 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbbbn\" (UniqueName: \"kubernetes.io/projected/9492cc5c-9147-4e3e-934f-9f55c707d2ff-kube-api-access-cbbbn\") pod \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.491953 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-utilities\") pod \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\" (UID: \"9492cc5c-9147-4e3e-934f-9f55c707d2ff\") " Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.492855 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-utilities" (OuterVolumeSpecName: "utilities") pod "9492cc5c-9147-4e3e-934f-9f55c707d2ff" (UID: "9492cc5c-9147-4e3e-934f-9f55c707d2ff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.500284 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9492cc5c-9147-4e3e-934f-9f55c707d2ff-kube-api-access-cbbbn" (OuterVolumeSpecName: "kube-api-access-cbbbn") pod "9492cc5c-9147-4e3e-934f-9f55c707d2ff" (UID: "9492cc5c-9147-4e3e-934f-9f55c707d2ff"). InnerVolumeSpecName "kube-api-access-cbbbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.543164 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9492cc5c-9147-4e3e-934f-9f55c707d2ff" (UID: "9492cc5c-9147-4e3e-934f-9f55c707d2ff"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.595280 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbbbn\" (UniqueName: \"kubernetes.io/projected/9492cc5c-9147-4e3e-934f-9f55c707d2ff-kube-api-access-cbbbn\") on node \"crc\" DevicePath \"\"" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.595344 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.595371 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9492cc5c-9147-4e3e-934f-9f55c707d2ff-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.850450 4809 generic.go:334] "Generic (PLEG): container finished" podID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerID="eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3" exitCode=0 Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.850515 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerDied","Data":"eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3"} Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.850869 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g9mrs" event={"ID":"9492cc5c-9147-4e3e-934f-9f55c707d2ff","Type":"ContainerDied","Data":"0aa2ad988378ea3b35908490e52d22f6833d3c8ceb998c8b2a64c6d33ef56174"} Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.850907 4809 scope.go:117] "RemoveContainer" containerID="eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.850599 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g9mrs" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.902006 4809 scope.go:117] "RemoveContainer" containerID="dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.903069 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g9mrs"] Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.922889 4809 scope.go:117] "RemoveContainer" containerID="f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.923363 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g9mrs"] Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.998118 4809 scope.go:117] "RemoveContainer" containerID="eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3" Sep 30 02:37:41 crc kubenswrapper[4809]: E0930 02:37:41.998748 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3\": container with ID starting with eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3 not found: ID does not exist" containerID="eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.998787 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3"} err="failed to get container status \"eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3\": rpc error: code = NotFound desc = could not find container \"eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3\": container with ID starting with eb275c5ff326f0aef4176a2189beb6b29ef652d19bfce8472a5bde903364cbf3 not found: ID does not exist" Sep 30 02:37:41 crc kubenswrapper[4809]: I0930 02:37:41.998810 4809 scope.go:117] "RemoveContainer" containerID="dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e" Sep 30 02:37:42 crc kubenswrapper[4809]: E0930 02:37:42.001200 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e\": container with ID starting with dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e not found: ID does not exist" containerID="dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e" Sep 30 02:37:42 crc kubenswrapper[4809]: I0930 02:37:42.001341 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e"} err="failed to get container status \"dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e\": rpc error: code = NotFound desc = could not find container \"dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e\": container with ID starting with dc1312f280ca15ada9e7d388572e316b93ea9b159ff4aa5ae94d0799273a6e4e not found: ID does not exist" Sep 30 02:37:42 crc kubenswrapper[4809]: I0930 02:37:42.001385 4809 scope.go:117] "RemoveContainer" containerID="f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7" Sep 30 02:37:42 crc kubenswrapper[4809]: E0930 02:37:42.002860 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7\": container with ID starting with f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7 not found: ID does not exist" containerID="f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7" Sep 30 02:37:42 crc kubenswrapper[4809]: I0930 02:37:42.002905 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7"} err="failed to get container status \"f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7\": rpc error: code = NotFound desc = could not find container \"f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7\": container with ID starting with f38ef333d3b936e8b6037942865b25da24dd10224ea4076735f8c490f8d48dc7 not found: ID does not exist" Sep 30 02:37:43 crc kubenswrapper[4809]: I0930 02:37:43.710252 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" path="/var/lib/kubelet/pods/9492cc5c-9147-4e3e-934f-9f55c707d2ff/volumes" Sep 30 02:37:55 crc kubenswrapper[4809]: I0930 02:37:55.324721 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:37:55 crc kubenswrapper[4809]: I0930 02:37:55.325229 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:37:55 crc kubenswrapper[4809]: I0930 02:37:55.325282 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:37:55 crc kubenswrapper[4809]: I0930 02:37:55.326188 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2bbc1b5ae2d7f94976345d37596f7094e7e2dbab97fff80bacb51904b6819d54"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:37:55 crc kubenswrapper[4809]: I0930 02:37:55.326257 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://2bbc1b5ae2d7f94976345d37596f7094e7e2dbab97fff80bacb51904b6819d54" gracePeriod=600 Sep 30 02:37:56 crc kubenswrapper[4809]: I0930 02:37:56.013285 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="2bbc1b5ae2d7f94976345d37596f7094e7e2dbab97fff80bacb51904b6819d54" exitCode=0 Sep 30 02:37:56 crc kubenswrapper[4809]: I0930 02:37:56.014154 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"2bbc1b5ae2d7f94976345d37596f7094e7e2dbab97fff80bacb51904b6819d54"} Sep 30 02:37:56 crc kubenswrapper[4809]: I0930 02:37:56.014206 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215"} Sep 30 02:37:56 crc kubenswrapper[4809]: I0930 02:37:56.014224 4809 scope.go:117] "RemoveContainer" containerID="8d20754beb25a1b52cc5cc085758e9c611019697e8921cd831c4d8d7114919ef" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.144501 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9dx97"] Sep 30 02:39:22 crc kubenswrapper[4809]: E0930 02:39:22.145924 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="extract-utilities" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.145950 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="extract-utilities" Sep 30 02:39:22 crc kubenswrapper[4809]: E0930 02:39:22.145990 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="registry-server" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146003 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="registry-server" Sep 30 02:39:22 crc kubenswrapper[4809]: E0930 02:39:22.146031 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="extract-content" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146042 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="extract-content" Sep 30 02:39:22 crc kubenswrapper[4809]: E0930 02:39:22.146065 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="registry-server" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146075 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="registry-server" Sep 30 02:39:22 crc kubenswrapper[4809]: E0930 02:39:22.146124 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="extract-content" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146135 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="extract-content" Sep 30 02:39:22 crc kubenswrapper[4809]: E0930 02:39:22.146156 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="extract-utilities" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146166 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="extract-utilities" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146521 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fdfd08c-764d-4b67-9258-6be93428da7a" containerName="registry-server" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.146588 4809 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9492cc5c-9147-4e3e-934f-9f55c707d2ff" containerName="registry-server" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.149234 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.160309 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9dx97"] Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.262575 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-catalog-content\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.262685 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llcg6\" (UniqueName: \"kubernetes.io/projected/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-kube-api-access-llcg6\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.262811 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-utilities\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.365288 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-catalog-content\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.365370 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llcg6\" (UniqueName: \"kubernetes.io/projected/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-kube-api-access-llcg6\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.365423 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-utilities\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.366362 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-catalog-content\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.366401 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-utilities\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " 
pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.399486 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llcg6\" (UniqueName: \"kubernetes.io/projected/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-kube-api-access-llcg6\") pod \"redhat-operators-9dx97\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:22 crc kubenswrapper[4809]: I0930 02:39:22.493538 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:23 crc kubenswrapper[4809]: I0930 02:39:23.049302 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9dx97"] Sep 30 02:39:23 crc kubenswrapper[4809]: I0930 02:39:23.120982 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerStarted","Data":"7f29ed10fdaa9285284a8a1af7714e6ac8efe4e7bd5a9af204c7951cce40788e"} Sep 30 02:39:24 crc kubenswrapper[4809]: I0930 02:39:24.141141 4809 generic.go:334] "Generic (PLEG): container finished" podID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerID="6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000" exitCode=0 Sep 30 02:39:24 crc kubenswrapper[4809]: I0930 02:39:24.141261 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerDied","Data":"6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000"} Sep 30 02:39:26 crc kubenswrapper[4809]: I0930 02:39:26.167117 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerStarted","Data":"3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4"} Sep 30 02:39:29 crc kubenswrapper[4809]: I0930 02:39:29.207476 4809 generic.go:334] "Generic (PLEG): container finished" podID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerID="3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4" exitCode=0 Sep 30 02:39:29 crc kubenswrapper[4809]: I0930 02:39:29.207565 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerDied","Data":"3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4"} Sep 30 02:39:30 crc kubenswrapper[4809]: I0930 02:39:30.220004 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerStarted","Data":"eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986"} Sep 30 02:39:30 crc kubenswrapper[4809]: I0930 02:39:30.252026 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9dx97" podStartSLOduration=2.713958984 podStartE2EDuration="8.252004355s" podCreationTimestamp="2025-09-30 02:39:22 +0000 UTC" firstStartedPulling="2025-09-30 02:39:24.146735367 +0000 UTC m=+9015.182984815" lastFinishedPulling="2025-09-30 02:39:29.684780738 +0000 UTC m=+9020.721030186" observedRunningTime="2025-09-30 02:39:30.245265211 +0000 UTC m=+9021.281514619" watchObservedRunningTime="2025-09-30 02:39:30.252004355 +0000 UTC m=+9021.288253773" Sep 
30 02:39:32 crc kubenswrapper[4809]: I0930 02:39:32.493692 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:32 crc kubenswrapper[4809]: I0930 02:39:32.494257 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:33 crc kubenswrapper[4809]: I0930 02:39:33.568332 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9dx97" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:39:33 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:39:33 crc kubenswrapper[4809]: > Sep 30 02:39:43 crc kubenswrapper[4809]: I0930 02:39:43.544410 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9dx97" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="registry-server" probeResult="failure" output=< Sep 30 02:39:43 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:39:43 crc kubenswrapper[4809]: > Sep 30 02:39:52 crc kubenswrapper[4809]: I0930 02:39:52.564828 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:52 crc kubenswrapper[4809]: I0930 02:39:52.632657 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:53 crc kubenswrapper[4809]: I0930 02:39:53.342796 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9dx97"] Sep 30 02:39:54 crc kubenswrapper[4809]: I0930 02:39:54.500312 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9dx97" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="registry-server" containerID="cri-o://eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986" gracePeriod=2 Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.060945 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.165666 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llcg6\" (UniqueName: \"kubernetes.io/projected/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-kube-api-access-llcg6\") pod \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.166175 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-catalog-content\") pod \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.166317 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-utilities\") pod \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\" (UID: \"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1\") " Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.167842 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-utilities" (OuterVolumeSpecName: "utilities") pod "1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" (UID: "1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.177418 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-kube-api-access-llcg6" (OuterVolumeSpecName: "kube-api-access-llcg6") pod "1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" (UID: "1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1"). InnerVolumeSpecName "kube-api-access-llcg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.255271 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" (UID: "1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.268930 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.268963 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.268977 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llcg6\" (UniqueName: \"kubernetes.io/projected/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1-kube-api-access-llcg6\") on node \"crc\" DevicePath \"\"" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.325037 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.325112 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.514677 4809 generic.go:334] "Generic (PLEG): container finished" podID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerID="eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986" exitCode=0 Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.514719 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerDied","Data":"eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986"} Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.514744 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dx97" event={"ID":"1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1","Type":"ContainerDied","Data":"7f29ed10fdaa9285284a8a1af7714e6ac8efe4e7bd5a9af204c7951cce40788e"} Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.514761 4809 scope.go:117] "RemoveContainer" containerID="eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.514891 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9dx97" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.559236 4809 scope.go:117] "RemoveContainer" containerID="3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.565587 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9dx97"] Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.587230 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9dx97"] Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.597137 4809 scope.go:117] "RemoveContainer" containerID="6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.642655 4809 scope.go:117] "RemoveContainer" containerID="eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986" Sep 30 02:39:55 crc kubenswrapper[4809]: E0930 02:39:55.643099 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986\": container with ID starting with eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986 not found: ID does not exist" containerID="eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.643131 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986"} err="failed to get container status \"eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986\": rpc error: code = NotFound desc = could not find container \"eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986\": container with ID starting with eef328c98b8475363478c5c3dc95266c64f35c84c1dc4bebc8ff282c598b7986 not found: ID does not exist" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.643152 4809 scope.go:117] "RemoveContainer" containerID="3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4" Sep 30 02:39:55 crc kubenswrapper[4809]: E0930 02:39:55.643484 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4\": container with ID starting with 3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4 not found: ID does not exist" containerID="3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.643504 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4"} err="failed to get container status \"3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4\": rpc error: code = NotFound desc = could not find container \"3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4\": container with ID starting with 3e81aca865a0798b979eb00d516b4ae99a5f339852ae4ff474052aec5ba073a4 not found: ID does not exist" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.643519 4809 scope.go:117] "RemoveContainer" containerID="6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000" Sep 30 02:39:55 crc kubenswrapper[4809]: E0930 02:39:55.643839 4809 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000\": container with ID starting with 6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000 not found: ID does not exist" containerID="6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.643860 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000"} err="failed to get container status \"6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000\": rpc error: code = NotFound desc = could not find container \"6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000\": container with ID starting with 6810331a6f0c2c458cdf26a7559aee84f0a9c2874ea3aa5d8f9a92372cc48000 not found: ID does not exist" Sep 30 02:39:55 crc kubenswrapper[4809]: I0930 02:39:55.705539 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" path="/var/lib/kubelet/pods/1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1/volumes" Sep 30 02:40:25 crc kubenswrapper[4809]: I0930 02:40:25.325371 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:40:25 crc kubenswrapper[4809]: I0930 02:40:25.326168 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:40:55 crc kubenswrapper[4809]: I0930 02:40:55.325094 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:40:55 crc kubenswrapper[4809]: I0930 02:40:55.326002 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:40:55 crc kubenswrapper[4809]: I0930 02:40:55.326069 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:40:55 crc kubenswrapper[4809]: I0930 02:40:55.327044 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:40:55 crc kubenswrapper[4809]: I0930 02:40:55.327118 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" gracePeriod=600 Sep 30 02:40:55 crc kubenswrapper[4809]: E0930 02:40:55.461322 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:40:56 crc kubenswrapper[4809]: I0930 02:40:56.295393 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" exitCode=0 Sep 30 02:40:56 crc kubenswrapper[4809]: I0930 02:40:56.295584 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215"} Sep 30 02:40:56 crc kubenswrapper[4809]: I0930 02:40:56.295715 4809 scope.go:117] "RemoveContainer" containerID="2bbc1b5ae2d7f94976345d37596f7094e7e2dbab97fff80bacb51904b6819d54" Sep 30 02:40:56 crc kubenswrapper[4809]: I0930 02:40:56.296729 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:40:56 crc kubenswrapper[4809]: E0930 02:40:56.297089 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:41:11 crc kubenswrapper[4809]: I0930 02:41:11.691532 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:41:11 crc kubenswrapper[4809]: E0930 02:41:11.692478 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:41:24 crc kubenswrapper[4809]: I0930 02:41:24.692809 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:41:24 crc kubenswrapper[4809]: E0930 02:41:24.693705 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:41:38 crc kubenswrapper[4809]: I0930 02:41:38.690897 4809 scope.go:117] 
"RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:41:38 crc kubenswrapper[4809]: E0930 02:41:38.691831 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:41:52 crc kubenswrapper[4809]: I0930 02:41:52.691613 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:41:52 crc kubenswrapper[4809]: E0930 02:41:52.692657 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:42:03 crc kubenswrapper[4809]: I0930 02:42:03.692007 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:42:03 crc kubenswrapper[4809]: E0930 02:42:03.693177 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:42:17 crc kubenswrapper[4809]: I0930 02:42:17.692339 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:42:17 crc kubenswrapper[4809]: E0930 02:42:17.693714 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:42:29 crc kubenswrapper[4809]: I0930 02:42:29.704628 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:42:29 crc kubenswrapper[4809]: E0930 02:42:29.705886 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:42:42 crc kubenswrapper[4809]: I0930 02:42:42.691408 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:42:42 crc kubenswrapper[4809]: E0930 02:42:42.692812 4809 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:42:57 crc kubenswrapper[4809]: I0930 02:42:57.690919 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:42:57 crc kubenswrapper[4809]: E0930 02:42:57.691729 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:43:09 crc kubenswrapper[4809]: I0930 02:43:09.719490 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:43:09 crc kubenswrapper[4809]: E0930 02:43:09.720392 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:43:17 crc kubenswrapper[4809]: I0930 02:43:17.004449 4809 generic.go:334] "Generic (PLEG): container finished" podID="015d3aad-2e8e-4491-bf81-88058e25fe55" containerID="7876c535c150bb0a1aafd15b74f991aa3c4927489ed4b0a7505d22589ec78e08" exitCode=0 Sep 30 02:43:17 crc kubenswrapper[4809]: I0930 02:43:17.004607 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"015d3aad-2e8e-4491-bf81-88058e25fe55","Type":"ContainerDied","Data":"7876c535c150bb0a1aafd15b74f991aa3c4927489ed4b0a7505d22589ec78e08"} Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.494249 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612227 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-workdir\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612524 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config-secret\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612558 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-temporary\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612585 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612692 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-config-data\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612723 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ssh-key\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612893 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m2qc\" (UniqueName: \"kubernetes.io/projected/015d3aad-2e8e-4491-bf81-88058e25fe55-kube-api-access-8m2qc\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.612956 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.613009 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ca-certs\") pod \"015d3aad-2e8e-4491-bf81-88058e25fe55\" (UID: \"015d3aad-2e8e-4491-bf81-88058e25fe55\") " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.613902 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-config-data" (OuterVolumeSpecName: "config-data") pod 
"015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.615421 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.618856 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.619697 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/015d3aad-2e8e-4491-bf81-88058e25fe55-kube-api-access-8m2qc" (OuterVolumeSpecName: "kube-api-access-8m2qc") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "kube-api-access-8m2qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.621993 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.644443 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.658240 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.660311 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.681951 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "015d3aad-2e8e-4491-bf81-88058e25fe55" (UID: "015d3aad-2e8e-4491-bf81-88058e25fe55"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719184 4809 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719236 4809 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719388 4809 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719444 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m2qc\" (UniqueName: \"kubernetes.io/projected/015d3aad-2e8e-4491-bf81-88058e25fe55-kube-api-access-8m2qc\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719464 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719483 4809 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719502 4809 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719518 4809 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/015d3aad-2e8e-4491-bf81-88058e25fe55-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.719537 4809 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/015d3aad-2e8e-4491-bf81-88058e25fe55-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.763128 4809 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Sep 30 02:43:18 crc kubenswrapper[4809]: I0930 02:43:18.822422 4809 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Sep 30 02:43:19 crc kubenswrapper[4809]: I0930 02:43:19.031902 4809 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"015d3aad-2e8e-4491-bf81-88058e25fe55","Type":"ContainerDied","Data":"66e13c9281a58b4601e7b368998459efbbc31e83b1f3fe97214f290e0330eca6"} Sep 30 02:43:19 crc kubenswrapper[4809]: I0930 02:43:19.031985 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66e13c9281a58b4601e7b368998459efbbc31e83b1f3fe97214f290e0330eca6" Sep 30 02:43:19 crc kubenswrapper[4809]: I0930 02:43:19.032119 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 02:43:21 crc kubenswrapper[4809]: I0930 02:43:21.692351 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:43:21 crc kubenswrapper[4809]: E0930 02:43:21.693358 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.621698 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 02:43:27 crc kubenswrapper[4809]: E0930 02:43:27.624706 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="extract-content" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.624769 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="extract-content" Sep 30 02:43:27 crc kubenswrapper[4809]: E0930 02:43:27.624817 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="015d3aad-2e8e-4491-bf81-88058e25fe55" containerName="tempest-tests-tempest-tests-runner" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.624835 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="015d3aad-2e8e-4491-bf81-88058e25fe55" containerName="tempest-tests-tempest-tests-runner" Sep 30 02:43:27 crc kubenswrapper[4809]: E0930 02:43:27.624891 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="extract-utilities" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.624909 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="extract-utilities" Sep 30 02:43:27 crc kubenswrapper[4809]: E0930 02:43:27.624944 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="registry-server" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.624960 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="registry-server" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.625486 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="015d3aad-2e8e-4491-bf81-88058e25fe55" containerName="tempest-tests-tempest-tests-runner" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.625589 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c4fa5ba-d96d-465e-8ed5-307dfbc73cb1" containerName="registry-server" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 
02:43:27.627065 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.639856 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gm56j" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.640442 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.746557 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6ws2\" (UniqueName: \"kubernetes.io/projected/92b038e4-311d-43cc-962a-384be228c12d-kube-api-access-n6ws2\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.746864 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.849041 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6ws2\" (UniqueName: \"kubernetes.io/projected/92b038e4-311d-43cc-962a-384be228c12d-kube-api-access-n6ws2\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.849133 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.851130 4809 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.876688 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6ws2\" (UniqueName: \"kubernetes.io/projected/92b038e4-311d-43cc-962a-384be228c12d-kube-api-access-n6ws2\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.882150 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"92b038e4-311d-43cc-962a-384be228c12d\") " 
pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:27 crc kubenswrapper[4809]: I0930 02:43:27.970326 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 02:43:28 crc kubenswrapper[4809]: I0930 02:43:28.475935 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 02:43:28 crc kubenswrapper[4809]: I0930 02:43:28.488907 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 02:43:29 crc kubenswrapper[4809]: I0930 02:43:29.165047 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"92b038e4-311d-43cc-962a-384be228c12d","Type":"ContainerStarted","Data":"500916c32baa0fcb3934e821fc3931f13bf4d41f09a56d3a0037df2ad1b26608"} Sep 30 02:43:30 crc kubenswrapper[4809]: I0930 02:43:30.178755 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"92b038e4-311d-43cc-962a-384be228c12d","Type":"ContainerStarted","Data":"5601776752a2314a98647b219f921af672df1c94d5b349d03a362e1e73b4aa73"} Sep 30 02:43:30 crc kubenswrapper[4809]: I0930 02:43:30.195858 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.808816768 podStartE2EDuration="3.195837299s" podCreationTimestamp="2025-09-30 02:43:27 +0000 UTC" firstStartedPulling="2025-09-30 02:43:28.488572564 +0000 UTC m=+9259.524821982" lastFinishedPulling="2025-09-30 02:43:29.875593075 +0000 UTC m=+9260.911842513" observedRunningTime="2025-09-30 02:43:30.194147602 +0000 UTC m=+9261.230397020" watchObservedRunningTime="2025-09-30 02:43:30.195837299 +0000 UTC m=+9261.232086717" Sep 30 02:43:34 crc kubenswrapper[4809]: I0930 02:43:34.691504 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:43:34 crc kubenswrapper[4809]: E0930 02:43:34.692549 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:43:47 crc kubenswrapper[4809]: I0930 02:43:47.691380 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:43:47 crc kubenswrapper[4809]: E0930 02:43:47.692171 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:43:56 crc kubenswrapper[4809]: I0930 02:43:56.953512 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cwlbq/must-gather-b6znr"] Sep 30 02:43:56 crc kubenswrapper[4809]: I0930 02:43:56.955731 4809 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:56 crc kubenswrapper[4809]: I0930 02:43:56.960965 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-cwlbq"/"default-dockercfg-82j27" Sep 30 02:43:56 crc kubenswrapper[4809]: I0930 02:43:56.961136 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cwlbq"/"kube-root-ca.crt" Sep 30 02:43:56 crc kubenswrapper[4809]: I0930 02:43:56.965336 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cwlbq"/"openshift-service-ca.crt" Sep 30 02:43:56 crc kubenswrapper[4809]: I0930 02:43:56.971496 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cwlbq/must-gather-b6znr"] Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.071886 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhf6l\" (UniqueName: \"kubernetes.io/projected/e2a3c2cf-805e-4187-9f1c-064f7732236d-kube-api-access-bhf6l\") pod \"must-gather-b6znr\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.071971 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e2a3c2cf-805e-4187-9f1c-064f7732236d-must-gather-output\") pod \"must-gather-b6znr\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.174152 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e2a3c2cf-805e-4187-9f1c-064f7732236d-must-gather-output\") pod \"must-gather-b6znr\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.174349 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhf6l\" (UniqueName: \"kubernetes.io/projected/e2a3c2cf-805e-4187-9f1c-064f7732236d-kube-api-access-bhf6l\") pod \"must-gather-b6znr\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.174595 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e2a3c2cf-805e-4187-9f1c-064f7732236d-must-gather-output\") pod \"must-gather-b6znr\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.206946 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhf6l\" (UniqueName: \"kubernetes.io/projected/e2a3c2cf-805e-4187-9f1c-064f7732236d-kube-api-access-bhf6l\") pod \"must-gather-b6znr\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.274007 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:43:57 crc kubenswrapper[4809]: I0930 02:43:57.847385 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cwlbq/must-gather-b6znr"] Sep 30 02:43:58 crc kubenswrapper[4809]: I0930 02:43:58.631015 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/must-gather-b6znr" event={"ID":"e2a3c2cf-805e-4187-9f1c-064f7732236d","Type":"ContainerStarted","Data":"9edeb255c788927e2362cbff3f12e908e7199cae6a65d4f2dff81eb509d04c31"} Sep 30 02:44:00 crc kubenswrapper[4809]: I0930 02:44:00.691101 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:44:00 crc kubenswrapper[4809]: E0930 02:44:00.691703 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:44:02 crc kubenswrapper[4809]: I0930 02:44:02.686155 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/must-gather-b6znr" event={"ID":"e2a3c2cf-805e-4187-9f1c-064f7732236d","Type":"ContainerStarted","Data":"5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b"} Sep 30 02:44:02 crc kubenswrapper[4809]: I0930 02:44:02.686813 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/must-gather-b6znr" event={"ID":"e2a3c2cf-805e-4187-9f1c-064f7732236d","Type":"ContainerStarted","Data":"6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348"} Sep 30 02:44:02 crc kubenswrapper[4809]: I0930 02:44:02.719994 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cwlbq/must-gather-b6znr" podStartSLOduration=2.6773019639999998 podStartE2EDuration="6.719978975s" podCreationTimestamp="2025-09-30 02:43:56 +0000 UTC" firstStartedPulling="2025-09-30 02:43:57.856147044 +0000 UTC m=+9288.892396452" lastFinishedPulling="2025-09-30 02:44:01.898824055 +0000 UTC m=+9292.935073463" observedRunningTime="2025-09-30 02:44:02.715806182 +0000 UTC m=+9293.752055590" watchObservedRunningTime="2025-09-30 02:44:02.719978975 +0000 UTC m=+9293.756228383" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.238362 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-x8xk6"] Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.243826 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.363576 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-host\") pod \"crc-debug-x8xk6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.363623 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm577\" (UniqueName: \"kubernetes.io/projected/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-kube-api-access-vm577\") pod \"crc-debug-x8xk6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.466715 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-host\") pod \"crc-debug-x8xk6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.466761 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm577\" (UniqueName: \"kubernetes.io/projected/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-kube-api-access-vm577\") pod \"crc-debug-x8xk6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.468237 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-host\") pod \"crc-debug-x8xk6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.491238 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm577\" (UniqueName: \"kubernetes.io/projected/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-kube-api-access-vm577\") pod \"crc-debug-x8xk6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.562137 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:44:08 crc kubenswrapper[4809]: W0930 02:44:08.601654 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6440d880_96a1_40ed_9b9f_c0b2a08eecb6.slice/crio-40cd5cac211448fc0e40e049cfa3ae3b406891bae9f1f03962282486d87a41c5 WatchSource:0}: Error finding container 40cd5cac211448fc0e40e049cfa3ae3b406891bae9f1f03962282486d87a41c5: Status 404 returned error can't find the container with id 40cd5cac211448fc0e40e049cfa3ae3b406891bae9f1f03962282486d87a41c5 Sep 30 02:44:08 crc kubenswrapper[4809]: I0930 02:44:08.769509 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" event={"ID":"6440d880-96a1-40ed-9b9f-c0b2a08eecb6","Type":"ContainerStarted","Data":"40cd5cac211448fc0e40e049cfa3ae3b406891bae9f1f03962282486d87a41c5"} Sep 30 02:44:11 crc kubenswrapper[4809]: I0930 02:44:11.696017 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:44:11 crc kubenswrapper[4809]: E0930 02:44:11.696850 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:44:21 crc kubenswrapper[4809]: I0930 02:44:21.907951 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" event={"ID":"6440d880-96a1-40ed-9b9f-c0b2a08eecb6","Type":"ContainerStarted","Data":"41697243e9d05105232c9cc2eb41650b860aa5aa1ed74e93e9363d4e47941d39"} Sep 30 02:44:21 crc kubenswrapper[4809]: I0930 02:44:21.938278 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" podStartSLOduration=1.6478511930000002 podStartE2EDuration="13.93825322s" podCreationTimestamp="2025-09-30 02:44:08 +0000 UTC" firstStartedPulling="2025-09-30 02:44:08.603748775 +0000 UTC m=+9299.639998183" lastFinishedPulling="2025-09-30 02:44:20.894150802 +0000 UTC m=+9311.930400210" observedRunningTime="2025-09-30 02:44:21.921892954 +0000 UTC m=+9312.958142402" watchObservedRunningTime="2025-09-30 02:44:21.93825322 +0000 UTC m=+9312.974502638" Sep 30 02:44:26 crc kubenswrapper[4809]: I0930 02:44:26.691065 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:44:26 crc kubenswrapper[4809]: E0930 02:44:26.691876 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:44:38 crc kubenswrapper[4809]: I0930 02:44:38.691567 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:44:38 crc kubenswrapper[4809]: E0930 02:44:38.692615 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:44:53 crc kubenswrapper[4809]: I0930 02:44:53.691086 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:44:53 crc kubenswrapper[4809]: E0930 02:44:53.691914 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:44:57 crc kubenswrapper[4809]: I0930 02:44:57.705269 4809 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-nmstate/nmstate-handler-t4sfn" podUID="7be99730-5c11-4b2b-b063-3500766ddfe6" containerName="nmstate-handler" probeResult="failure" output="command timed out" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.599760 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw"] Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.602253 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.605578 4809 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.606272 4809 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.611956 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw"] Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.743321 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvctl\" (UniqueName: \"kubernetes.io/projected/e712b852-d5e6-4352-8187-df4338325d51-kube-api-access-qvctl\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.743384 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e712b852-d5e6-4352-8187-df4338325d51-config-volume\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.743542 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e712b852-d5e6-4352-8187-df4338325d51-secret-volume\") pod \"collect-profiles-29320005-dltmw\" (UID: 
\"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.845767 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvctl\" (UniqueName: \"kubernetes.io/projected/e712b852-d5e6-4352-8187-df4338325d51-kube-api-access-qvctl\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.845851 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e712b852-d5e6-4352-8187-df4338325d51-config-volume\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.847148 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e712b852-d5e6-4352-8187-df4338325d51-config-volume\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.847728 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e712b852-d5e6-4352-8187-df4338325d51-secret-volume\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.870115 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e712b852-d5e6-4352-8187-df4338325d51-secret-volume\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.878461 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvctl\" (UniqueName: \"kubernetes.io/projected/e712b852-d5e6-4352-8187-df4338325d51-kube-api-access-qvctl\") pod \"collect-profiles-29320005-dltmw\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:00 crc kubenswrapper[4809]: I0930 02:45:00.939623 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:03 crc kubenswrapper[4809]: I0930 02:45:03.897062 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw"] Sep 30 02:45:04 crc kubenswrapper[4809]: W0930 02:45:04.150984 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode712b852_d5e6_4352_8187_df4338325d51.slice/crio-373904e096ce0d2a5673c58b4567a6a00feaf7b43009d400b0965dda5d013187 WatchSource:0}: Error finding container 373904e096ce0d2a5673c58b4567a6a00feaf7b43009d400b0965dda5d013187: Status 404 returned error can't find the container with id 373904e096ce0d2a5673c58b4567a6a00feaf7b43009d400b0965dda5d013187 Sep 30 02:45:04 crc kubenswrapper[4809]: I0930 02:45:04.325779 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" event={"ID":"e712b852-d5e6-4352-8187-df4338325d51","Type":"ContainerStarted","Data":"373904e096ce0d2a5673c58b4567a6a00feaf7b43009d400b0965dda5d013187"} Sep 30 02:45:05 crc kubenswrapper[4809]: I0930 02:45:05.339374 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" event={"ID":"e712b852-d5e6-4352-8187-df4338325d51","Type":"ContainerStarted","Data":"d79626b044261b9ffacdf9bb8ea41ef110bf2945456ac0a49834d433d163b6d6"} Sep 30 02:45:05 crc kubenswrapper[4809]: I0930 02:45:05.361782 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" podStartSLOduration=5.361764943 podStartE2EDuration="5.361764943s" podCreationTimestamp="2025-09-30 02:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 02:45:05.356963742 +0000 UTC m=+9356.393213150" watchObservedRunningTime="2025-09-30 02:45:05.361764943 +0000 UTC m=+9356.398014351" Sep 30 02:45:05 crc kubenswrapper[4809]: I0930 02:45:05.690979 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:45:05 crc kubenswrapper[4809]: E0930 02:45:05.691683 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:45:06 crc kubenswrapper[4809]: I0930 02:45:06.351208 4809 generic.go:334] "Generic (PLEG): container finished" podID="e712b852-d5e6-4352-8187-df4338325d51" containerID="d79626b044261b9ffacdf9bb8ea41ef110bf2945456ac0a49834d433d163b6d6" exitCode=0 Sep 30 02:45:06 crc kubenswrapper[4809]: I0930 02:45:06.351505 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" event={"ID":"e712b852-d5e6-4352-8187-df4338325d51","Type":"ContainerDied","Data":"d79626b044261b9ffacdf9bb8ea41ef110bf2945456ac0a49834d433d163b6d6"} Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.025400 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.128615 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e712b852-d5e6-4352-8187-df4338325d51-secret-volume\") pod \"e712b852-d5e6-4352-8187-df4338325d51\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.128755 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvctl\" (UniqueName: \"kubernetes.io/projected/e712b852-d5e6-4352-8187-df4338325d51-kube-api-access-qvctl\") pod \"e712b852-d5e6-4352-8187-df4338325d51\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.128793 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e712b852-d5e6-4352-8187-df4338325d51-config-volume\") pod \"e712b852-d5e6-4352-8187-df4338325d51\" (UID: \"e712b852-d5e6-4352-8187-df4338325d51\") " Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.129372 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e712b852-d5e6-4352-8187-df4338325d51-config-volume" (OuterVolumeSpecName: "config-volume") pod "e712b852-d5e6-4352-8187-df4338325d51" (UID: "e712b852-d5e6-4352-8187-df4338325d51"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.129941 4809 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e712b852-d5e6-4352-8187-df4338325d51-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.139873 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e712b852-d5e6-4352-8187-df4338325d51-kube-api-access-qvctl" (OuterVolumeSpecName: "kube-api-access-qvctl") pod "e712b852-d5e6-4352-8187-df4338325d51" (UID: "e712b852-d5e6-4352-8187-df4338325d51"). InnerVolumeSpecName "kube-api-access-qvctl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.157737 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e712b852-d5e6-4352-8187-df4338325d51-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e712b852-d5e6-4352-8187-df4338325d51" (UID: "e712b852-d5e6-4352-8187-df4338325d51"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.231513 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvctl\" (UniqueName: \"kubernetes.io/projected/e712b852-d5e6-4352-8187-df4338325d51-kube-api-access-qvctl\") on node \"crc\" DevicePath \"\"" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.231548 4809 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e712b852-d5e6-4352-8187-df4338325d51-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.385497 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" event={"ID":"e712b852-d5e6-4352-8187-df4338325d51","Type":"ContainerDied","Data":"373904e096ce0d2a5673c58b4567a6a00feaf7b43009d400b0965dda5d013187"} Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.385594 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320005-dltmw" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.385666 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="373904e096ce0d2a5673c58b4567a6a00feaf7b43009d400b0965dda5d013187" Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.472659 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q"] Sep 30 02:45:08 crc kubenswrapper[4809]: I0930 02:45:08.486117 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319960-w795q"] Sep 30 02:45:09 crc kubenswrapper[4809]: I0930 02:45:09.710843 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ce6f380-e28b-4698-8466-b1ba874c2968" path="/var/lib/kubelet/pods/3ce6f380-e28b-4698-8466-b1ba874c2968/volumes" Sep 30 02:45:20 crc kubenswrapper[4809]: I0930 02:45:20.691293 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:45:20 crc kubenswrapper[4809]: E0930 02:45:20.692903 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:45:22 crc kubenswrapper[4809]: I0930 02:45:22.209015 4809 scope.go:117] "RemoveContainer" containerID="a6732ff5d800ccdf98839db5325a5626b2f4149b53f3ee2de91ac945bedd5182" Sep 30 02:45:31 crc kubenswrapper[4809]: I0930 02:45:31.691124 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:45:31 crc kubenswrapper[4809]: E0930 02:45:31.691828 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:45:39 
crc kubenswrapper[4809]: I0930 02:45:39.273408 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_4a58ca7e-5f56-4340-85ba-16c0952512ae/aodh-api/0.log" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.499698 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_4a58ca7e-5f56-4340-85ba-16c0952512ae/aodh-evaluator/0.log" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.546051 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_4a58ca7e-5f56-4340-85ba-16c0952512ae/aodh-listener/0.log" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.700603 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_4a58ca7e-5f56-4340-85ba-16c0952512ae/aodh-notifier/0.log" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.893455 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8df69b5b-8qc6t_99531336-93a0-4d03-a774-923e22900476/barbican-api/0.log" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.956944 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-krcdw"] Sep 30 02:45:39 crc kubenswrapper[4809]: E0930 02:45:39.959993 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e712b852-d5e6-4352-8187-df4338325d51" containerName="collect-profiles" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.960021 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="e712b852-d5e6-4352-8187-df4338325d51" containerName="collect-profiles" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.960712 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="e712b852-d5e6-4352-8187-df4338325d51" containerName="collect-profiles" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.968362 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:39 crc kubenswrapper[4809]: I0930 02:45:39.983923 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-krcdw"] Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.083966 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8df69b5b-8qc6t_99531336-93a0-4d03-a774-923e22900476/barbican-api-log/0.log" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.144805 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-catalog-content\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.144993 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjr9p\" (UniqueName: \"kubernetes.io/projected/8be25087-e2b8-48c3-b87b-c99f26b06102-kube-api-access-wjr9p\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.145136 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-utilities\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.247731 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-utilities\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.247821 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-catalog-content\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.247919 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjr9p\" (UniqueName: \"kubernetes.io/projected/8be25087-e2b8-48c3-b87b-c99f26b06102-kube-api-access-wjr9p\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.248377 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-utilities\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.248564 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-catalog-content\") pod 
\"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.278107 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjr9p\" (UniqueName: \"kubernetes.io/projected/8be25087-e2b8-48c3-b87b-c99f26b06102-kube-api-access-wjr9p\") pod \"certified-operators-krcdw\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.298687 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-764b94dc8b-rsfhs_69f247bc-7b09-4e36-b749-0889a206162c/barbican-keystone-listener/0.log" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.308677 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:40 crc kubenswrapper[4809]: I0930 02:45:40.624140 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-764b94dc8b-rsfhs_69f247bc-7b09-4e36-b749-0889a206162c/barbican-keystone-listener-log/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:40.842306 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b8d67c9f-x6rts_08e2596d-d7a4-46ea-ab20-054084e61605/barbican-worker/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:40.878533 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b8d67c9f-x6rts_08e2596d-d7a4-46ea-ab20-054084e61605/barbican-worker-log/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.128178 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-4q489_4c8e9701-fb27-481f-8572-e7f163487a92/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.427899 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_da43457d-28e7-451d-a937-b7d51e56da99/ceilometer-central-agent/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.489236 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_da43457d-28e7-451d-a937-b7d51e56da99/ceilometer-notification-agent/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.608576 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_da43457d-28e7-451d-a937-b7d51e56da99/proxy-httpd/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.643943 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_da43457d-28e7-451d-a937-b7d51e56da99/sg-core/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.775953 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-krcdw"] Sep 30 02:45:41 crc kubenswrapper[4809]: W0930 02:45:41.790937 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8be25087_e2b8_48c3_b87b_c99f26b06102.slice/crio-df69f588f0ab3b749bb7438f7b1a33b76e924b4a343d4509782d49e16878d75b WatchSource:0}: Error finding container df69f588f0ab3b749bb7438f7b1a33b76e924b4a343d4509782d49e16878d75b: Status 404 returned error can't find the container with id df69f588f0ab3b749bb7438f7b1a33b76e924b4a343d4509782d49e16878d75b Sep 30 
02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.844323 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-s4pp7_31110c5c-0cbc-4a43-a2ff-36c7fd353e5d/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:41 crc kubenswrapper[4809]: I0930 02:45:41.993135 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9jbjw_7314abce-8cbe-45a0-b6de-9b8a03555fc6/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.194608 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_965477a4-d575-4c80-826b-5ac22f3bfee3/cinder-api-log/0.log" Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.386693 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_965477a4-d575-4c80-826b-5ac22f3bfee3/cinder-api/0.log" Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.593721 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_b4975857-a339-4008-b600-76960da1412a/cinder-backup/0.log" Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.664256 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_b4975857-a339-4008-b600-76960da1412a/probe/0.log" Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.768193 4809 generic.go:334] "Generic (PLEG): container finished" podID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerID="7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1" exitCode=0 Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.768232 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerDied","Data":"7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1"} Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.768255 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerStarted","Data":"df69f588f0ab3b749bb7438f7b1a33b76e924b4a343d4509782d49e16878d75b"} Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.864706 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_e387977a-8b10-46fb-bd34-0d8212c55fac/cinder-scheduler/0.log" Sep 30 02:45:42 crc kubenswrapper[4809]: I0930 02:45:42.949405 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_e387977a-8b10-46fb-bd34-0d8212c55fac/probe/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.142841 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_6747706c-74d5-4d04-8d05-74868b8c1f28/cinder-volume/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.207564 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_6747706c-74d5-4d04-8d05-74868b8c1f28/probe/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.381216 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-k4ql7_70922d4c-72b5-4c9e-ae16-1fb2d7c15a07/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.482180 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-29s9g_3de84e93-5356-4496-8a35-dc295412042a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.626524 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-74cfff99f-4rtdc_4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e/init/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.691249 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:45:43 crc kubenswrapper[4809]: E0930 02:45:43.691541 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.808503 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-74cfff99f-4rtdc_4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e/init/0.log" Sep 30 02:45:43 crc kubenswrapper[4809]: I0930 02:45:43.998730 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-74cfff99f-4rtdc_4de13cc2-c89f-45c4-bb69-e86bf6ef6a8e/dnsmasq-dns/0.log" Sep 30 02:45:44 crc kubenswrapper[4809]: I0930 02:45:44.019418 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3b8e8476-60e6-46aa-804a-67fc467166d2/glance-httpd/0.log" Sep 30 02:45:44 crc kubenswrapper[4809]: I0930 02:45:44.148567 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3b8e8476-60e6-46aa-804a-67fc467166d2/glance-log/0.log" Sep 30 02:45:44 crc kubenswrapper[4809]: I0930 02:45:44.235306 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_df6124d8-9505-4109-b1dc-a4e58382b4a3/glance-httpd/0.log" Sep 30 02:45:44 crc kubenswrapper[4809]: I0930 02:45:44.503681 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_df6124d8-9505-4109-b1dc-a4e58382b4a3/glance-log/0.log" Sep 30 02:45:44 crc kubenswrapper[4809]: I0930 02:45:44.791833 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerStarted","Data":"3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d"} Sep 30 02:45:45 crc kubenswrapper[4809]: I0930 02:45:45.372916 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-6b6bcf86f-4vgsr_2c2cf376-aff0-4120-b4bd-3b3e4076d5e9/heat-engine/0.log" Sep 30 02:45:45 crc kubenswrapper[4809]: I0930 02:45:45.830608 4809 generic.go:334] "Generic (PLEG): container finished" podID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerID="3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d" exitCode=0 Sep 30 02:45:45 crc kubenswrapper[4809]: I0930 02:45:45.830917 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerDied","Data":"3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d"} Sep 30 02:45:45 crc kubenswrapper[4809]: 
I0930 02:45:45.857132 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-86f457b754-mllm6_f0702a54-83ed-4e06-a9f6-91fd8c106cf4/heat-api/0.log" Sep 30 02:45:46 crc kubenswrapper[4809]: I0930 02:45:46.057853 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5b44f7d95d-84wt2_818908b2-bd0c-49f5-a239-06ae4b04f236/horizon/0.log" Sep 30 02:45:46 crc kubenswrapper[4809]: I0930 02:45:46.415364 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-6786b5c967-grl5d_60c21ffa-346c-4c88-a449-a8b99906421e/heat-cfnapi/0.log" Sep 30 02:45:46 crc kubenswrapper[4809]: I0930 02:45:46.582447 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-fmk2d_cd9b1a91-564f-405f-8fbf-b148dedf9948/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:46 crc kubenswrapper[4809]: I0930 02:45:46.839242 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5b44f7d95d-84wt2_818908b2-bd0c-49f5-a239-06ae4b04f236/horizon-log/0.log" Sep 30 02:45:46 crc kubenswrapper[4809]: I0930 02:45:46.869352 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerStarted","Data":"fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1"} Sep 30 02:45:46 crc kubenswrapper[4809]: I0930 02:45:46.924875 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-96dxw_e994ba19-e00a-4362-8866-130d7bb0bd5a/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:47 crc kubenswrapper[4809]: I0930 02:45:47.154509 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319901-rfb6f_095e40dd-8c29-4c2f-8089-04eaec05c406/keystone-cron/0.log" Sep 30 02:45:47 crc kubenswrapper[4809]: I0930 02:45:47.423969 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319961-shptp_816eef70-b240-48ed-93d3-813d38b2fe12/keystone-cron/0.log" Sep 30 02:45:47 crc kubenswrapper[4809]: I0930 02:45:47.537482 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-697fb77f5c-2p8qd_7b313c12-f4aa-4511-a071-8e246d12e74f/keystone-api/0.log" Sep 30 02:45:47 crc kubenswrapper[4809]: I0930 02:45:47.726003 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_75c446e9-7373-4b8a-a7e8-6eb5dcca2fe1/kube-state-metrics/0.log" Sep 30 02:45:47 crc kubenswrapper[4809]: I0930 02:45:47.908064 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-krcdw" podStartSLOduration=5.42300655 podStartE2EDuration="8.908045705s" podCreationTimestamp="2025-09-30 02:45:39 +0000 UTC" firstStartedPulling="2025-09-30 02:45:42.773107271 +0000 UTC m=+9393.809356679" lastFinishedPulling="2025-09-30 02:45:46.258146426 +0000 UTC m=+9397.294395834" observedRunningTime="2025-09-30 02:45:47.898275327 +0000 UTC m=+9398.934524735" watchObservedRunningTime="2025-09-30 02:45:47.908045705 +0000 UTC m=+9398.944295103" Sep 30 02:45:47 crc kubenswrapper[4809]: I0930 02:45:47.963685 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-mgthw_4886a4c8-eb31-4729-9364-86652f1284c8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:48 crc 
kubenswrapper[4809]: I0930 02:45:48.340120 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_logging-edpm-deployment-openstack-edpm-ipam-fp9bn_9ab038a4-a7ad-46da-8479-89b9aa5a30e8/logging-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:48 crc kubenswrapper[4809]: I0930 02:45:48.620585 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_89f47877-ed2c-4340-9fa4-e69b105d4d9d/manila-api-log/0.log" Sep 30 02:45:48 crc kubenswrapper[4809]: I0930 02:45:48.625536 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_89f47877-ed2c-4340-9fa4-e69b105d4d9d/manila-api/0.log" Sep 30 02:45:48 crc kubenswrapper[4809]: I0930 02:45:48.891361 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_8ad1bd72-2d1a-4de5-891c-573971953aa1/manila-scheduler/0.log" Sep 30 02:45:48 crc kubenswrapper[4809]: I0930 02:45:48.899198 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_8ad1bd72-2d1a-4de5-891c-573971953aa1/probe/0.log" Sep 30 02:45:49 crc kubenswrapper[4809]: I0930 02:45:49.178771 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_f2c82170-5802-4ca9-a749-84618db7b0b5/manila-share/0.log" Sep 30 02:45:49 crc kubenswrapper[4809]: I0930 02:45:49.192612 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_f2c82170-5802-4ca9-a749-84618db7b0b5/probe/0.log" Sep 30 02:45:49 crc kubenswrapper[4809]: I0930 02:45:49.704872 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mysqld-exporter-0_5713929f-9da0-4005-8396-ff4a878a552c/mysqld-exporter/0.log" Sep 30 02:45:50 crc kubenswrapper[4809]: I0930 02:45:50.288895 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-579f9b76b9-drlks_05c44209-faa8-4b7d-a127-9a69b13d8e10/neutron-httpd/0.log" Sep 30 02:45:50 crc kubenswrapper[4809]: I0930 02:45:50.309394 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:50 crc kubenswrapper[4809]: I0930 02:45:50.310720 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:50 crc kubenswrapper[4809]: I0930 02:45:50.344042 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-579f9b76b9-drlks_05c44209-faa8-4b7d-a127-9a69b13d8e10/neutron-api/0.log" Sep 30 02:45:50 crc kubenswrapper[4809]: I0930 02:45:50.372383 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:50 crc kubenswrapper[4809]: I0930 02:45:50.691836 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-hmsb2_6338d96e-b4cf-4390-b303-f7eb46f3e68a/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:51 crc kubenswrapper[4809]: I0930 02:45:51.880559 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_7388abe1-ffab-4b37-8d32-4677f7ba0412/nova-api-log/0.log" Sep 30 02:45:52 crc kubenswrapper[4809]: I0930 02:45:52.002745 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:52 crc kubenswrapper[4809]: I0930 02:45:52.051555 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-krcdw"] Sep 30 02:45:52 crc kubenswrapper[4809]: I0930 02:45:52.569352 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_7388abe1-ffab-4b37-8d32-4677f7ba0412/nova-api-api/0.log" Sep 30 02:45:52 crc kubenswrapper[4809]: I0930 02:45:52.619692 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_1137ef5b-0b42-40e7-8591-443e77c14e83/nova-cell0-conductor-conductor/0.log" Sep 30 02:45:52 crc kubenswrapper[4809]: I0930 02:45:52.993255 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_5b789d99-904d-412c-9c51-1e40f767385e/nova-cell1-conductor-conductor/0.log" Sep 30 02:45:53 crc kubenswrapper[4809]: I0930 02:45:53.402874 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_90e94897-7fd7-4d70-b20d-1d3f429f3522/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 02:45:53 crc kubenswrapper[4809]: I0930 02:45:53.771710 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kqnxm_a976aeec-864a-445c-8fc2-5e5d53332dce/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:45:53 crc kubenswrapper[4809]: I0930 02:45:53.934695 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-krcdw" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="registry-server" containerID="cri-o://fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1" gracePeriod=2 Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.140578 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb/nova-metadata-log/0.log" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.522359 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.572817 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-catalog-content\") pod \"8be25087-e2b8-48c3-b87b-c99f26b06102\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.573046 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjr9p\" (UniqueName: \"kubernetes.io/projected/8be25087-e2b8-48c3-b87b-c99f26b06102-kube-api-access-wjr9p\") pod \"8be25087-e2b8-48c3-b87b-c99f26b06102\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.573125 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-utilities\") pod \"8be25087-e2b8-48c3-b87b-c99f26b06102\" (UID: \"8be25087-e2b8-48c3-b87b-c99f26b06102\") " Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.574348 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-utilities" (OuterVolumeSpecName: "utilities") pod "8be25087-e2b8-48c3-b87b-c99f26b06102" (UID: "8be25087-e2b8-48c3-b87b-c99f26b06102"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.600192 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8be25087-e2b8-48c3-b87b-c99f26b06102-kube-api-access-wjr9p" (OuterVolumeSpecName: "kube-api-access-wjr9p") pod "8be25087-e2b8-48c3-b87b-c99f26b06102" (UID: "8be25087-e2b8-48c3-b87b-c99f26b06102"). InnerVolumeSpecName "kube-api-access-wjr9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.647627 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8be25087-e2b8-48c3-b87b-c99f26b06102" (UID: "8be25087-e2b8-48c3-b87b-c99f26b06102"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.674906 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjr9p\" (UniqueName: \"kubernetes.io/projected/8be25087-e2b8-48c3-b87b-c99f26b06102-kube-api-access-wjr9p\") on node \"crc\" DevicePath \"\"" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.674946 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.674959 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be25087-e2b8-48c3-b87b-c99f26b06102-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.876614 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_d6904529-09ba-412f-a78d-0afff0e91091/nova-scheduler-scheduler/0.log" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.948619 4809 generic.go:334] "Generic (PLEG): container finished" podID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerID="fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1" exitCode=0 Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.948761 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerDied","Data":"fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1"} Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.948798 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-krcdw" event={"ID":"8be25087-e2b8-48c3-b87b-c99f26b06102","Type":"ContainerDied","Data":"df69f588f0ab3b749bb7438f7b1a33b76e924b4a343d4509782d49e16878d75b"} Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.948849 4809 scope.go:117] "RemoveContainer" containerID="fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.949036 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-krcdw" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.978182 4809 scope.go:117] "RemoveContainer" containerID="3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d" Sep 30 02:45:54 crc kubenswrapper[4809]: I0930 02:45:54.994847 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-krcdw"] Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.029901 4809 scope.go:117] "RemoveContainer" containerID="7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.061875 4809 scope.go:117] "RemoveContainer" containerID="fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1" Sep 30 02:45:55 crc kubenswrapper[4809]: E0930 02:45:55.063744 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1\": container with ID starting with fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1 not found: ID does not exist" containerID="fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.063778 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1"} err="failed to get container status \"fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1\": rpc error: code = NotFound desc = could not find container \"fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1\": container with ID starting with fa84c96fd585e1bd51f4f73b3e6e6d19edcdf5beac9b8ad7101974e605e9bdd1 not found: ID does not exist" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.063799 4809 scope.go:117] "RemoveContainer" containerID="3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d" Sep 30 02:45:55 crc kubenswrapper[4809]: E0930 02:45:55.066307 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d\": container with ID starting with 3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d not found: ID does not exist" containerID="3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.066349 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d"} err="failed to get container status \"3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d\": rpc error: code = NotFound desc = could not find container \"3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d\": container with ID starting with 3213c3a95d792d32d86438bef4bc393726efc98ba63c357a6bf4c8da825f540d not found: ID does not exist" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.066376 4809 scope.go:117] "RemoveContainer" containerID="7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.068045 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-krcdw"] Sep 30 02:45:55 crc kubenswrapper[4809]: E0930 02:45:55.068204 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1\": container with ID starting with 7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1 not found: ID does not exist" containerID="7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.068244 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1"} err="failed to get container status \"7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1\": rpc error: code = NotFound desc = could not find container \"7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1\": container with ID starting with 7d3325ce0db663666e87ca50e9e985a4bf243fb0052961cde4517245a38b86f1 not found: ID does not exist" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.489335 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f9711a36-f5cf-4143-9ebe-13efdf29aa7a/mysql-bootstrap/0.log" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.691701 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.703211 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" path="/var/lib/kubelet/pods/8be25087-e2b8-48c3-b87b-c99f26b06102/volumes" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.706168 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f9711a36-f5cf-4143-9ebe-13efdf29aa7a/mysql-bootstrap/0.log" Sep 30 02:45:55 crc kubenswrapper[4809]: I0930 02:45:55.966490 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd"} Sep 30 02:45:56 crc kubenswrapper[4809]: I0930 02:45:56.082147 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f9711a36-f5cf-4143-9ebe-13efdf29aa7a/galera/0.log" Sep 30 02:45:56 crc kubenswrapper[4809]: I0930 02:45:56.816682 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c5df7664-528d-4076-a69f-bc59afb921a1/mysql-bootstrap/0.log" Sep 30 02:45:57 crc kubenswrapper[4809]: I0930 02:45:57.163962 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c5df7664-528d-4076-a69f-bc59afb921a1/mysql-bootstrap/0.log" Sep 30 02:45:57 crc kubenswrapper[4809]: I0930 02:45:57.461610 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c5df7664-528d-4076-a69f-bc59afb921a1/galera/0.log" Sep 30 02:45:57 crc kubenswrapper[4809]: I0930 02:45:57.939009 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_96d0165f-2a62-4c67-b140-0073b5ef59a0/openstackclient/0.log" Sep 30 02:45:58 crc kubenswrapper[4809]: I0930 02:45:58.423892 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-h8c75_e469bd8b-615c-425a-89b0-2d37b4f738dd/openstack-network-exporter/0.log" Sep 30 02:45:58 crc kubenswrapper[4809]: I0930 02:45:58.598522 4809 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_nova-metadata-0_ed84a4ca-87f9-4ef2-aedb-4feb49d6a0fb/nova-metadata-metadata/0.log" Sep 30 02:45:58 crc kubenswrapper[4809]: I0930 02:45:58.827371 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wgkv7_c028cda5-76ab-45af-b4ab-72332d0471a0/ovsdb-server-init/0.log" Sep 30 02:45:59 crc kubenswrapper[4809]: I0930 02:45:59.094900 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wgkv7_c028cda5-76ab-45af-b4ab-72332d0471a0/ovsdb-server-init/0.log" Sep 30 02:45:59 crc kubenswrapper[4809]: I0930 02:45:59.104623 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wgkv7_c028cda5-76ab-45af-b4ab-72332d0471a0/ovs-vswitchd/0.log" Sep 30 02:45:59 crc kubenswrapper[4809]: I0930 02:45:59.372176 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wgkv7_c028cda5-76ab-45af-b4ab-72332d0471a0/ovsdb-server/0.log" Sep 30 02:45:59 crc kubenswrapper[4809]: I0930 02:45:59.540432 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-swbnb_40f19224-d223-4f7c-ad39-3afc9a8c18b1/ovn-controller/0.log" Sep 30 02:45:59 crc kubenswrapper[4809]: I0930 02:45:59.818758 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-7nrfj_16525fbd-5a20-46fc-a7d2-95860193d091/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:46:00 crc kubenswrapper[4809]: I0930 02:46:00.084459 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5/openstack-network-exporter/0.log" Sep 30 02:46:00 crc kubenswrapper[4809]: I0930 02:46:00.173730 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a1f782bb-b1ea-4a71-8ca4-2f9ada1e2fd5/ovn-northd/0.log" Sep 30 02:46:00 crc kubenswrapper[4809]: I0930 02:46:00.411687 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3c991006-090a-45ae-afec-388e497dd0ad/openstack-network-exporter/0.log" Sep 30 02:46:00 crc kubenswrapper[4809]: I0930 02:46:00.516342 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3c991006-090a-45ae-afec-388e497dd0ad/ovsdbserver-nb/0.log" Sep 30 02:46:00 crc kubenswrapper[4809]: I0930 02:46:00.726163 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_279ac629-91a5-4dcc-9c7a-a8e64b4a1874/memcached/0.log" Sep 30 02:46:00 crc kubenswrapper[4809]: I0930 02:46:00.918332 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2cfa1ec3-7355-4a69-a63f-ee850e1e8e79/openstack-network-exporter/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.031929 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2cfa1ec3-7355-4a69-a63f-ee850e1e8e79/ovsdbserver-sb/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.234991 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7f6c7c986d-hgzt9_945e30a4-7c16-4109-9240-16a3383dc4ba/placement-api/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.389866 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7f6c7c986d-hgzt9_945e30a4-7c16-4109-9240-16a3383dc4ba/placement-log/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.490407 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_33bc071b-ab55-4bea-a4b2-351c18e716e7/init-config-reloader/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.652495 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_33bc071b-ab55-4bea-a4b2-351c18e716e7/init-config-reloader/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.654251 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_33bc071b-ab55-4bea-a4b2-351c18e716e7/config-reloader/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.694317 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_33bc071b-ab55-4bea-a4b2-351c18e716e7/prometheus/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.852798 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_33bc071b-ab55-4bea-a4b2-351c18e716e7/thanos-sidecar/0.log" Sep 30 02:46:01 crc kubenswrapper[4809]: I0930 02:46:01.951305 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3e11f1c6-6535-453c-86b9-98c1ba7abd72/setup-container/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.113123 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3e11f1c6-6535-453c-86b9-98c1ba7abd72/rabbitmq/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.152086 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3e11f1c6-6535-453c-86b9-98c1ba7abd72/setup-container/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.369297 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_69e69cac-c659-4973-a220-82c222df7c35/setup-container/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.502398 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_69e69cac-c659-4973-a220-82c222df7c35/setup-container/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.767871 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-q6v9m_deddce34-8531-4212-84b9-38ae8445dd7c/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.807934 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_69e69cac-c659-4973-a220-82c222df7c35/rabbitmq/0.log" Sep 30 02:46:02 crc kubenswrapper[4809]: I0930 02:46:02.947953 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-hsbkl_26c07bd8-9cdc-4d44-82b9-d76f51b9ce27/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.073421 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-t5q7r_3f31de59-0674-4a21-bfff-74c55467ca17/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.259230 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-c8q95_189c7ce2-ea68-4463-86d4-b0ddb3e6e8c3/ssh-known-hosts-edpm-deployment/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.503858 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-76dd7988df-8d5vk_0f29e497-2911-4dc0-8a6d-b59c8d254f60/proxy-server/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.533957 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-rxwsm_be3e3acf-e816-4c7d-a70e-1f51262676be/swift-ring-rebalance/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.638694 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-76dd7988df-8d5vk_0f29e497-2911-4dc0-8a6d-b59c8d254f60/proxy-httpd/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.768382 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/account-auditor/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.824823 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/account-reaper/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.942477 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/account-replicator/0.log" Sep 30 02:46:03 crc kubenswrapper[4809]: I0930 02:46:03.974389 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/account-server/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.065868 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/container-auditor/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.067676 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/container-replicator/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.139045 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/container-server/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.177557 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/container-updater/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.290318 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/object-expirer/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.313346 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/object-auditor/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.355718 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/object-replicator/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.407708 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/object-server/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.488695 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/rsync/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.496351 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/object-updater/0.log" Sep 30 
02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.589127 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8f2a0baa-5f99-4c6d-a1bd-9fc7cadb1b8f/swift-recon-cron/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.731132 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mzf87_a004127c-f068-4f63-89a4-689cfec52df1/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:46:04 crc kubenswrapper[4809]: I0930 02:46:04.857144 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-power-monitoring-edpm-deployment-openstack-edpm-kqxxq_87824ee6-3fd5-4157-afdb-4e524127dae8/telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:46:05 crc kubenswrapper[4809]: I0930 02:46:05.201078 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_92b038e4-311d-43cc-962a-384be228c12d/test-operator-logs-container/0.log" Sep 30 02:46:05 crc kubenswrapper[4809]: I0930 02:46:05.454955 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_015d3aad-2e8e-4491-bf81-88058e25fe55/tempest-tests-tempest-tests-runner/0.log" Sep 30 02:46:05 crc kubenswrapper[4809]: I0930 02:46:05.529207 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jm6cb_798a0c6b-ac60-4345-8359-e3db0a97744c/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 02:47:13 crc kubenswrapper[4809]: I0930 02:47:13.912199 4809 generic.go:334] "Generic (PLEG): container finished" podID="6440d880-96a1-40ed-9b9f-c0b2a08eecb6" containerID="41697243e9d05105232c9cc2eb41650b860aa5aa1ed74e93e9363d4e47941d39" exitCode=0 Sep 30 02:47:13 crc kubenswrapper[4809]: I0930 02:47:13.912236 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" event={"ID":"6440d880-96a1-40ed-9b9f-c0b2a08eecb6","Type":"ContainerDied","Data":"41697243e9d05105232c9cc2eb41650b860aa5aa1ed74e93e9363d4e47941d39"} Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.084869 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.134836 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-x8xk6"] Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.147960 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-x8xk6"] Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.164247 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-host\") pod \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.164545 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-host" (OuterVolumeSpecName: "host") pod "6440d880-96a1-40ed-9b9f-c0b2a08eecb6" (UID: "6440d880-96a1-40ed-9b9f-c0b2a08eecb6"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.164630 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vm577\" (UniqueName: \"kubernetes.io/projected/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-kube-api-access-vm577\") pod \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\" (UID: \"6440d880-96a1-40ed-9b9f-c0b2a08eecb6\") " Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.165838 4809 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-host\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.181269 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-kube-api-access-vm577" (OuterVolumeSpecName: "kube-api-access-vm577") pod "6440d880-96a1-40ed-9b9f-c0b2a08eecb6" (UID: "6440d880-96a1-40ed-9b9f-c0b2a08eecb6"). InnerVolumeSpecName "kube-api-access-vm577". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.269334 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vm577\" (UniqueName: \"kubernetes.io/projected/6440d880-96a1-40ed-9b9f-c0b2a08eecb6-kube-api-access-vm577\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.709365 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6440d880-96a1-40ed-9b9f-c0b2a08eecb6" path="/var/lib/kubelet/pods/6440d880-96a1-40ed-9b9f-c0b2a08eecb6/volumes" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.949981 4809 scope.go:117] "RemoveContainer" containerID="41697243e9d05105232c9cc2eb41650b860aa5aa1ed74e93e9363d4e47941d39" Sep 30 02:47:15 crc kubenswrapper[4809]: I0930 02:47:15.950177 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-x8xk6" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330068 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-f5xxf"] Sep 30 02:47:16 crc kubenswrapper[4809]: E0930 02:47:16.330542 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6440d880-96a1-40ed-9b9f-c0b2a08eecb6" containerName="container-00" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330556 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="6440d880-96a1-40ed-9b9f-c0b2a08eecb6" containerName="container-00" Sep 30 02:47:16 crc kubenswrapper[4809]: E0930 02:47:16.330582 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="extract-utilities" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330589 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="extract-utilities" Sep 30 02:47:16 crc kubenswrapper[4809]: E0930 02:47:16.330618 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="registry-server" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330624 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="registry-server" Sep 30 02:47:16 crc kubenswrapper[4809]: E0930 02:47:16.330634 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="extract-content" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330658 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="extract-content" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330856 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be25087-e2b8-48c3-b87b-c99f26b06102" containerName="registry-server" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.330881 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="6440d880-96a1-40ed-9b9f-c0b2a08eecb6" containerName="container-00" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.332115 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.397872 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4f64e367-2b1c-40d6-b51f-c6331911d7e0-host\") pod \"crc-debug-f5xxf\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.397943 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svrpk\" (UniqueName: \"kubernetes.io/projected/4f64e367-2b1c-40d6-b51f-c6331911d7e0-kube-api-access-svrpk\") pod \"crc-debug-f5xxf\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.499907 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4f64e367-2b1c-40d6-b51f-c6331911d7e0-host\") pod \"crc-debug-f5xxf\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.500261 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svrpk\" (UniqueName: \"kubernetes.io/projected/4f64e367-2b1c-40d6-b51f-c6331911d7e0-kube-api-access-svrpk\") pod \"crc-debug-f5xxf\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.508444 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4f64e367-2b1c-40d6-b51f-c6331911d7e0-host\") pod \"crc-debug-f5xxf\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.525967 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svrpk\" (UniqueName: \"kubernetes.io/projected/4f64e367-2b1c-40d6-b51f-c6331911d7e0-kube-api-access-svrpk\") pod \"crc-debug-f5xxf\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.655779 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:16 crc kubenswrapper[4809]: I0930 02:47:16.988963 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" event={"ID":"4f64e367-2b1c-40d6-b51f-c6331911d7e0","Type":"ContainerStarted","Data":"a3dd9245dee7c792c7d275968bea587c38b632ea1378882dacb99b40498ddadb"} Sep 30 02:47:18 crc kubenswrapper[4809]: I0930 02:47:18.011976 4809 generic.go:334] "Generic (PLEG): container finished" podID="4f64e367-2b1c-40d6-b51f-c6331911d7e0" containerID="2f956dc7c7cc1cf14f93114e4d5b86bafee2b53405e4b05b3afa5a8e86973f3c" exitCode=0 Sep 30 02:47:18 crc kubenswrapper[4809]: I0930 02:47:18.012044 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" event={"ID":"4f64e367-2b1c-40d6-b51f-c6331911d7e0","Type":"ContainerDied","Data":"2f956dc7c7cc1cf14f93114e4d5b86bafee2b53405e4b05b3afa5a8e86973f3c"} Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.163093 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.263008 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svrpk\" (UniqueName: \"kubernetes.io/projected/4f64e367-2b1c-40d6-b51f-c6331911d7e0-kube-api-access-svrpk\") pod \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.263061 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4f64e367-2b1c-40d6-b51f-c6331911d7e0-host\") pod \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\" (UID: \"4f64e367-2b1c-40d6-b51f-c6331911d7e0\") " Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.263245 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f64e367-2b1c-40d6-b51f-c6331911d7e0-host" (OuterVolumeSpecName: "host") pod "4f64e367-2b1c-40d6-b51f-c6331911d7e0" (UID: "4f64e367-2b1c-40d6-b51f-c6331911d7e0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.263841 4809 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4f64e367-2b1c-40d6-b51f-c6331911d7e0-host\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.270727 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f64e367-2b1c-40d6-b51f-c6331911d7e0-kube-api-access-svrpk" (OuterVolumeSpecName: "kube-api-access-svrpk") pod "4f64e367-2b1c-40d6-b51f-c6331911d7e0" (UID: "4f64e367-2b1c-40d6-b51f-c6331911d7e0"). InnerVolumeSpecName "kube-api-access-svrpk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:47:19 crc kubenswrapper[4809]: I0930 02:47:19.366777 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svrpk\" (UniqueName: \"kubernetes.io/projected/4f64e367-2b1c-40d6-b51f-c6331911d7e0-kube-api-access-svrpk\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:20 crc kubenswrapper[4809]: I0930 02:47:20.057476 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" event={"ID":"4f64e367-2b1c-40d6-b51f-c6331911d7e0","Type":"ContainerDied","Data":"a3dd9245dee7c792c7d275968bea587c38b632ea1378882dacb99b40498ddadb"} Sep 30 02:47:20 crc kubenswrapper[4809]: I0930 02:47:20.058063 4809 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3dd9245dee7c792c7d275968bea587c38b632ea1378882dacb99b40498ddadb" Sep 30 02:47:20 crc kubenswrapper[4809]: I0930 02:47:20.057541 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-f5xxf" Sep 30 02:47:29 crc kubenswrapper[4809]: I0930 02:47:29.587224 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-f5xxf"] Sep 30 02:47:29 crc kubenswrapper[4809]: I0930 02:47:29.596238 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-f5xxf"] Sep 30 02:47:29 crc kubenswrapper[4809]: I0930 02:47:29.704824 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f64e367-2b1c-40d6-b51f-c6331911d7e0" path="/var/lib/kubelet/pods/4f64e367-2b1c-40d6-b51f-c6331911d7e0/volumes" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.774436 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-7mg7x"] Sep 30 02:47:30 crc kubenswrapper[4809]: E0930 02:47:30.774981 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f64e367-2b1c-40d6-b51f-c6331911d7e0" containerName="container-00" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.774997 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f64e367-2b1c-40d6-b51f-c6331911d7e0" containerName="container-00" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.775223 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f64e367-2b1c-40d6-b51f-c6331911d7e0" containerName="container-00" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.776061 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.895132 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnmgt\" (UniqueName: \"kubernetes.io/projected/eab975ea-8907-4a78-bde4-8a4568b79aa5-kube-api-access-wnmgt\") pod \"crc-debug-7mg7x\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.895417 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eab975ea-8907-4a78-bde4-8a4568b79aa5-host\") pod \"crc-debug-7mg7x\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.998567 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnmgt\" (UniqueName: \"kubernetes.io/projected/eab975ea-8907-4a78-bde4-8a4568b79aa5-kube-api-access-wnmgt\") pod \"crc-debug-7mg7x\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.998636 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eab975ea-8907-4a78-bde4-8a4568b79aa5-host\") pod \"crc-debug-7mg7x\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:30 crc kubenswrapper[4809]: I0930 02:47:30.998846 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eab975ea-8907-4a78-bde4-8a4568b79aa5-host\") pod \"crc-debug-7mg7x\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:31 crc kubenswrapper[4809]: I0930 02:47:31.038881 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnmgt\" (UniqueName: \"kubernetes.io/projected/eab975ea-8907-4a78-bde4-8a4568b79aa5-kube-api-access-wnmgt\") pod \"crc-debug-7mg7x\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:31 crc kubenswrapper[4809]: I0930 02:47:31.099298 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:31 crc kubenswrapper[4809]: I0930 02:47:31.170604 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" event={"ID":"eab975ea-8907-4a78-bde4-8a4568b79aa5","Type":"ContainerStarted","Data":"3fe6624b4819704ee5bd800c8ca2e3c7ebb1d16504f11973661a6ce0bdbbfbfe"} Sep 30 02:47:32 crc kubenswrapper[4809]: I0930 02:47:32.186937 4809 generic.go:334] "Generic (PLEG): container finished" podID="eab975ea-8907-4a78-bde4-8a4568b79aa5" containerID="c81a3b12a8dcafdb89a19979fee497c85e60f5f1a247bb33fc870d850018535d" exitCode=0 Sep 30 02:47:32 crc kubenswrapper[4809]: I0930 02:47:32.187108 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" event={"ID":"eab975ea-8907-4a78-bde4-8a4568b79aa5","Type":"ContainerDied","Data":"c81a3b12a8dcafdb89a19979fee497c85e60f5f1a247bb33fc870d850018535d"} Sep 30 02:47:32 crc kubenswrapper[4809]: I0930 02:47:32.245771 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-7mg7x"] Sep 30 02:47:32 crc kubenswrapper[4809]: I0930 02:47:32.256192 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cwlbq/crc-debug-7mg7x"] Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.320229 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.357835 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eab975ea-8907-4a78-bde4-8a4568b79aa5-host\") pod \"eab975ea-8907-4a78-bde4-8a4568b79aa5\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.357967 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnmgt\" (UniqueName: \"kubernetes.io/projected/eab975ea-8907-4a78-bde4-8a4568b79aa5-kube-api-access-wnmgt\") pod \"eab975ea-8907-4a78-bde4-8a4568b79aa5\" (UID: \"eab975ea-8907-4a78-bde4-8a4568b79aa5\") " Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.358052 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eab975ea-8907-4a78-bde4-8a4568b79aa5-host" (OuterVolumeSpecName: "host") pod "eab975ea-8907-4a78-bde4-8a4568b79aa5" (UID: "eab975ea-8907-4a78-bde4-8a4568b79aa5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.358684 4809 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eab975ea-8907-4a78-bde4-8a4568b79aa5-host\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.373081 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eab975ea-8907-4a78-bde4-8a4568b79aa5-kube-api-access-wnmgt" (OuterVolumeSpecName: "kube-api-access-wnmgt") pod "eab975ea-8907-4a78-bde4-8a4568b79aa5" (UID: "eab975ea-8907-4a78-bde4-8a4568b79aa5"). InnerVolumeSpecName "kube-api-access-wnmgt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.460698 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnmgt\" (UniqueName: \"kubernetes.io/projected/eab975ea-8907-4a78-bde4-8a4568b79aa5-kube-api-access-wnmgt\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:33 crc kubenswrapper[4809]: I0930 02:47:33.704272 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eab975ea-8907-4a78-bde4-8a4568b79aa5" path="/var/lib/kubelet/pods/eab975ea-8907-4a78-bde4-8a4568b79aa5/volumes" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.143743 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jv6gn"] Sep 30 02:47:34 crc kubenswrapper[4809]: E0930 02:47:34.144182 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eab975ea-8907-4a78-bde4-8a4568b79aa5" containerName="container-00" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.144198 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="eab975ea-8907-4a78-bde4-8a4568b79aa5" containerName="container-00" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.144405 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="eab975ea-8907-4a78-bde4-8a4568b79aa5" containerName="container-00" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.146955 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.149742 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/util/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.164087 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jv6gn"] Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.177847 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prtc2\" (UniqueName: \"kubernetes.io/projected/bb45b82d-b194-4e03-981a-9de9fdd23e0a-kube-api-access-prtc2\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.178381 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-utilities\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.178438 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-catalog-content\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.207481 4809 scope.go:117] "RemoveContainer" containerID="c81a3b12a8dcafdb89a19979fee497c85e60f5f1a247bb33fc870d850018535d" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.207625 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cwlbq/crc-debug-7mg7x" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.279840 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prtc2\" (UniqueName: \"kubernetes.io/projected/bb45b82d-b194-4e03-981a-9de9fdd23e0a-kube-api-access-prtc2\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.280340 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-utilities\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.280764 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-catalog-content\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.282043 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-utilities\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.282194 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-catalog-content\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.302054 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prtc2\" (UniqueName: \"kubernetes.io/projected/bb45b82d-b194-4e03-981a-9de9fdd23e0a-kube-api-access-prtc2\") pod \"redhat-marketplace-jv6gn\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.455466 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/util/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.455994 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/pull/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.464474 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.548638 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/pull/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.672737 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/pull/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.708515 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/util/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.765345 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_67123cb883bb9c7826e884724cb67a3f6a58863b6d321e954ce656ff6a48llx_ed080f95-063a-4f19-bb10-7153531ca913/extract/0.log" Sep 30 02:47:34 crc kubenswrapper[4809]: I0930 02:47:34.920936 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-4tp9f_e5433da0-c4a6-46a5-b936-bf361aca6946/kube-rbac-proxy/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.002290 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-4tp9f_e5433da0-c4a6-46a5-b936-bf361aca6946/manager/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.061909 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-2jhx6_6306e69b-dd84-4e09-b462-71a7f858d351/kube-rbac-proxy/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.168135 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jv6gn"] Sep 30 02:47:35 crc kubenswrapper[4809]: W0930 02:47:35.179171 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb45b82d_b194_4e03_981a_9de9fdd23e0a.slice/crio-6f0fe4d8a3c5b3f8aa4e623e4ee06d24261e558c1d54a7df4e4dc3a2d8d042e8 WatchSource:0}: Error finding container 6f0fe4d8a3c5b3f8aa4e623e4ee06d24261e558c1d54a7df4e4dc3a2d8d042e8: Status 404 returned error can't find the container with id 6f0fe4d8a3c5b3f8aa4e623e4ee06d24261e558c1d54a7df4e4dc3a2d8d042e8 Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.230357 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-2jhx6_6306e69b-dd84-4e09-b462-71a7f858d351/manager/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.239213 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerStarted","Data":"6f0fe4d8a3c5b3f8aa4e623e4ee06d24261e558c1d54a7df4e4dc3a2d8d042e8"} Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.241689 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-6qsgb_70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb/kube-rbac-proxy/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.355503 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-6qsgb_70a9a1c8-b1be-4c24-b2da-b0d75b5aaabb/manager/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.500450 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-9dbvw_11ecdd7b-edcc-44fd-9a10-ab2f58a40430/kube-rbac-proxy/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.533335 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-9dbvw_11ecdd7b-edcc-44fd-9a10-ab2f58a40430/manager/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.701140 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-jbqng_2a80b8c2-74ea-4a89-a241-026ffb8c2b1a/kube-rbac-proxy/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.787660 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-jbqng_2a80b8c2-74ea-4a89-a241-026ffb8c2b1a/manager/0.log" Sep 30 02:47:35 crc kubenswrapper[4809]: I0930 02:47:35.857188 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-v7sp5_0123e31b-cbb3-4545-b679-c7b27eeaebba/kube-rbac-proxy/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.008089 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-v7sp5_0123e31b-cbb3-4545-b679-c7b27eeaebba/manager/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.032606 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-jf4kc_aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b/kube-rbac-proxy/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.232244 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-njdxf_a44af2a0-4faa-4b9d-a426-f137586b0844/kube-rbac-proxy/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.254892 4809 generic.go:334] "Generic (PLEG): container finished" podID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerID="1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3" exitCode=0 Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.255094 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerDied","Data":"1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3"} Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.276266 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-jf4kc_aaa0a31e-8d3a-4846-8ca9-c55a9b33cd8b/manager/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.318096 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-njdxf_a44af2a0-4faa-4b9d-a426-f137586b0844/manager/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.338671 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tgqz6"] Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.340917 4809 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.415628 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tgqz6"] Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.446929 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhzxw\" (UniqueName: \"kubernetes.io/projected/831cd469-6ca2-43b7-9159-4cd36faeedbf-kube-api-access-zhzxw\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.447011 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-utilities\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.447145 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-catalog-content\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.548845 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-catalog-content\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.549028 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhzxw\" (UniqueName: \"kubernetes.io/projected/831cd469-6ca2-43b7-9159-4cd36faeedbf-kube-api-access-zhzxw\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.549060 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-utilities\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.549331 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-catalog-content\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.549420 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-utilities\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.568394 4809 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zhzxw\" (UniqueName: \"kubernetes.io/projected/831cd469-6ca2-43b7-9159-4cd36faeedbf-kube-api-access-zhzxw\") pod \"community-operators-tgqz6\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.675502 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-s8j8c_904e7639-6336-4012-a8f3-2b6e9c134a1f/manager/0.log" Sep 30 02:47:36 crc kubenswrapper[4809]: I0930 02:47:36.717509 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.040365 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-7t8jm_8817a592-d1b1-4695-b0fe-71b93371e5ea/kube-rbac-proxy/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.052115 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-s8j8c_904e7639-6336-4012-a8f3-2b6e9c134a1f/kube-rbac-proxy/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.090821 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-7t8jm_8817a592-d1b1-4695-b0fe-71b93371e5ea/manager/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.267928 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerStarted","Data":"8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b"} Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.324043 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tgqz6"] Sep 30 02:47:37 crc kubenswrapper[4809]: W0930 02:47:37.328028 4809 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod831cd469_6ca2_43b7_9159_4cd36faeedbf.slice/crio-e0c9c5325b595970b595038a91b977aa800673a12c677735d6204261ba0bdc35 WatchSource:0}: Error finding container e0c9c5325b595970b595038a91b977aa800673a12c677735d6204261ba0bdc35: Status 404 returned error can't find the container with id e0c9c5325b595970b595038a91b977aa800673a12c677735d6204261ba0bdc35 Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.382998 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-5fpgn_4a314eca-cb77-46bf-bebb-0a2b6910259d/kube-rbac-proxy/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.421352 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-5fpgn_4a314eca-cb77-46bf-bebb-0a2b6910259d/manager/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.526368 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-xcmjw_892fd48f-a2bf-4c2e-8167-1259635511a2/kube-rbac-proxy/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.653200 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-xcmjw_892fd48f-a2bf-4c2e-8167-1259635511a2/manager/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.715169 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-p85rd_d584c96f-2058-47bb-a205-672cebe71309/kube-rbac-proxy/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.896110 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-tqr46_d5b3350c-20ba-4b71-9d17-837992ec8740/kube-rbac-proxy/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.943283 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-tqr46_d5b3350c-20ba-4b71-9d17-837992ec8740/manager/0.log" Sep 30 02:47:37 crc kubenswrapper[4809]: I0930 02:47:37.945364 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-p85rd_d584c96f-2058-47bb-a205-672cebe71309/manager/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.131569 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-jjdj8_2acf945e-a5dd-457a-b4d6-4e487056b64a/kube-rbac-proxy/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.145282 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-jjdj8_2acf945e-a5dd-457a-b4d6-4e487056b64a/manager/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.279323 4809 generic.go:334] "Generic (PLEG): container finished" podID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerID="8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b" exitCode=0 Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.279404 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerDied","Data":"8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b"} Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.281317 4809 generic.go:334] "Generic (PLEG): container finished" podID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerID="50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a" exitCode=0 Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.281355 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerDied","Data":"50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a"} Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.281379 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerStarted","Data":"e0c9c5325b595970b595038a91b977aa800673a12c677735d6204261ba0bdc35"} Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.312711 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f4db64f4c-x4j4m_b7e3889e-1f91-45ab-8431-1d406c1c4f7e/kube-rbac-proxy/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.521784 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6cb7c558-p8rpq_9e4a0ee0-13ef-4f72-839c-da921dc76067/kube-rbac-proxy/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.609609 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6cb7c558-p8rpq_9e4a0ee0-13ef-4f72-839c-da921dc76067/operator/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.634940 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-ftl9q_43194d20-0560-4090-b0f5-2a32b11760db/registry-server/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.826460 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-h8xbq_d81fb42b-3e73-46fd-a522-ed87475d4a89/kube-rbac-proxy/0.log" Sep 30 02:47:38 crc kubenswrapper[4809]: I0930 02:47:38.902479 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-h8xbq_d81fb42b-3e73-46fd-a522-ed87475d4a89/manager/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.073094 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-7ncwh_8ef1bec4-7f6d-472d-b92d-495e3f87ae16/kube-rbac-proxy/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.093705 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-7ncwh_8ef1bec4-7f6d-472d-b92d-495e3f87ae16/manager/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.175110 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-cmrm8_0638d215-6c30-4683-894b-c91bdb99affb/operator/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.299677 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerStarted","Data":"ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb"} Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.319678 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jv6gn" podStartSLOduration=2.85190003 podStartE2EDuration="5.31965953s" podCreationTimestamp="2025-09-30 02:47:34 +0000 UTC" firstStartedPulling="2025-09-30 02:47:36.259480995 +0000 UTC m=+9507.295730403" lastFinishedPulling="2025-09-30 02:47:38.727240495 +0000 UTC m=+9509.763489903" observedRunningTime="2025-09-30 02:47:39.313430741 +0000 UTC m=+9510.349680149" watchObservedRunningTime="2025-09-30 02:47:39.31965953 +0000 UTC m=+9510.355908928" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.336867 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-mqwrp_842e3f23-bd61-4237-8739-1ac1ccb2fd22/kube-rbac-proxy/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.424804 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-mqwrp_842e3f23-bd61-4237-8739-1ac1ccb2fd22/manager/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.668718 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-84b8546f9c-jzhxh_445fd9f7-790d-42ea-9e64-183158dc1211/kube-rbac-proxy/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.797480 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-qtpm9_9728ab12-7cab-4897-9ba6-b913a15c2eed/kube-rbac-proxy/0.log" Sep 30 02:47:39 crc kubenswrapper[4809]: I0930 02:47:39.960219 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-qtpm9_9728ab12-7cab-4897-9ba6-b913a15c2eed/manager/0.log" Sep 30 02:47:40 crc kubenswrapper[4809]: I0930 02:47:40.062267 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f4db64f4c-x4j4m_b7e3889e-1f91-45ab-8431-1d406c1c4f7e/manager/0.log" Sep 30 02:47:40 crc kubenswrapper[4809]: I0930 02:47:40.063567 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-nshn9_bf650466-50bb-4a1f-8bac-0098c8127167/kube-rbac-proxy/0.log" Sep 30 02:47:40 crc kubenswrapper[4809]: I0930 02:47:40.182835 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-84b8546f9c-jzhxh_445fd9f7-790d-42ea-9e64-183158dc1211/manager/0.log" Sep 30 02:47:40 crc kubenswrapper[4809]: I0930 02:47:40.216806 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-nshn9_bf650466-50bb-4a1f-8bac-0098c8127167/manager/0.log" Sep 30 02:47:40 crc kubenswrapper[4809]: I0930 02:47:40.309530 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerStarted","Data":"f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b"} Sep 30 02:47:41 crc kubenswrapper[4809]: I0930 02:47:41.319618 4809 generic.go:334] "Generic (PLEG): container finished" podID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerID="f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b" exitCode=0 Sep 30 02:47:41 crc kubenswrapper[4809]: I0930 02:47:41.319671 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerDied","Data":"f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b"} Sep 30 02:47:42 crc kubenswrapper[4809]: I0930 02:47:42.331599 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerStarted","Data":"8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed"} Sep 30 02:47:42 crc kubenswrapper[4809]: I0930 02:47:42.354571 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tgqz6" podStartSLOduration=2.952114966 podStartE2EDuration="6.354552965s" podCreationTimestamp="2025-09-30 02:47:36 +0000 UTC" firstStartedPulling="2025-09-30 02:47:38.282456801 +0000 UTC m=+9509.318706209" lastFinishedPulling="2025-09-30 02:47:41.6848948 +0000 UTC m=+9512.721144208" observedRunningTime="2025-09-30 02:47:42.350040782 +0000 UTC m=+9513.386290190" watchObservedRunningTime="2025-09-30 02:47:42.354552965 +0000 UTC m=+9513.390802373" Sep 30 02:47:44 crc 
kubenswrapper[4809]: I0930 02:47:44.465133 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:44 crc kubenswrapper[4809]: I0930 02:47:44.465813 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:44 crc kubenswrapper[4809]: I0930 02:47:44.518772 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:45 crc kubenswrapper[4809]: I0930 02:47:45.450083 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:45 crc kubenswrapper[4809]: I0930 02:47:45.941879 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jv6gn"] Sep 30 02:47:46 crc kubenswrapper[4809]: I0930 02:47:46.718419 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:46 crc kubenswrapper[4809]: I0930 02:47:46.718664 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:46 crc kubenswrapper[4809]: I0930 02:47:46.773110 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:47 crc kubenswrapper[4809]: I0930 02:47:47.397571 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jv6gn" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="registry-server" containerID="cri-o://ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb" gracePeriod=2 Sep 30 02:47:47 crc kubenswrapper[4809]: I0930 02:47:47.476381 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:47 crc kubenswrapper[4809]: I0930 02:47:47.959094 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.043807 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prtc2\" (UniqueName: \"kubernetes.io/projected/bb45b82d-b194-4e03-981a-9de9fdd23e0a-kube-api-access-prtc2\") pod \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.043944 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-catalog-content\") pod \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.044088 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-utilities\") pod \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\" (UID: \"bb45b82d-b194-4e03-981a-9de9fdd23e0a\") " Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.044835 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-utilities" (OuterVolumeSpecName: "utilities") pod "bb45b82d-b194-4e03-981a-9de9fdd23e0a" (UID: "bb45b82d-b194-4e03-981a-9de9fdd23e0a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.053691 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb45b82d-b194-4e03-981a-9de9fdd23e0a-kube-api-access-prtc2" (OuterVolumeSpecName: "kube-api-access-prtc2") pod "bb45b82d-b194-4e03-981a-9de9fdd23e0a" (UID: "bb45b82d-b194-4e03-981a-9de9fdd23e0a"). InnerVolumeSpecName "kube-api-access-prtc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.061137 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb45b82d-b194-4e03-981a-9de9fdd23e0a" (UID: "bb45b82d-b194-4e03-981a-9de9fdd23e0a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.148265 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.148346 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb45b82d-b194-4e03-981a-9de9fdd23e0a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.148359 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prtc2\" (UniqueName: \"kubernetes.io/projected/bb45b82d-b194-4e03-981a-9de9fdd23e0a-kube-api-access-prtc2\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.411630 4809 generic.go:334] "Generic (PLEG): container finished" podID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerID="ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb" exitCode=0 Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.411710 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerDied","Data":"ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb"} Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.411742 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jv6gn" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.411779 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jv6gn" event={"ID":"bb45b82d-b194-4e03-981a-9de9fdd23e0a","Type":"ContainerDied","Data":"6f0fe4d8a3c5b3f8aa4e623e4ee06d24261e558c1d54a7df4e4dc3a2d8d042e8"} Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.411804 4809 scope.go:117] "RemoveContainer" containerID="ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.441127 4809 scope.go:117] "RemoveContainer" containerID="8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.450179 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jv6gn"] Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.476332 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jv6gn"] Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.492950 4809 scope.go:117] "RemoveContainer" containerID="1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.543907 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tgqz6"] Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.554754 4809 scope.go:117] "RemoveContainer" containerID="ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb" Sep 30 02:47:48 crc kubenswrapper[4809]: E0930 02:47:48.555194 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb\": container with ID starting with ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb not 
found: ID does not exist" containerID="ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.555252 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb"} err="failed to get container status \"ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb\": rpc error: code = NotFound desc = could not find container \"ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb\": container with ID starting with ade987f041393cd1f1c492eaeb31ddf32a9bb0fee6d4f8a1c7ca3477dc3364cb not found: ID does not exist" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.555282 4809 scope.go:117] "RemoveContainer" containerID="8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b" Sep 30 02:47:48 crc kubenswrapper[4809]: E0930 02:47:48.555531 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b\": container with ID starting with 8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b not found: ID does not exist" containerID="8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.555560 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b"} err="failed to get container status \"8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b\": rpc error: code = NotFound desc = could not find container \"8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b\": container with ID starting with 8a28013f400cc5fe94faf20d663c6c2a9d8436b1460e99b37fbec5b60881597b not found: ID does not exist" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.555575 4809 scope.go:117] "RemoveContainer" containerID="1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3" Sep 30 02:47:48 crc kubenswrapper[4809]: E0930 02:47:48.555818 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3\": container with ID starting with 1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3 not found: ID does not exist" containerID="1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3" Sep 30 02:47:48 crc kubenswrapper[4809]: I0930 02:47:48.555841 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3"} err="failed to get container status \"1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3\": rpc error: code = NotFound desc = could not find container \"1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3\": container with ID starting with 1d6ae7faec54590e5aa2c7df11a542d6f1f56ba1136c3cfddbee9db8bdcf19d3 not found: ID does not exist" Sep 30 02:47:49 crc kubenswrapper[4809]: I0930 02:47:49.422695 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tgqz6" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="registry-server" containerID="cri-o://8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed" gracePeriod=2 Sep 30 
02:47:49 crc kubenswrapper[4809]: I0930 02:47:49.714551 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" path="/var/lib/kubelet/pods/bb45b82d-b194-4e03-981a-9de9fdd23e0a/volumes" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.018796 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.092863 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhzxw\" (UniqueName: \"kubernetes.io/projected/831cd469-6ca2-43b7-9159-4cd36faeedbf-kube-api-access-zhzxw\") pod \"831cd469-6ca2-43b7-9159-4cd36faeedbf\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.093105 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-catalog-content\") pod \"831cd469-6ca2-43b7-9159-4cd36faeedbf\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.093155 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-utilities\") pod \"831cd469-6ca2-43b7-9159-4cd36faeedbf\" (UID: \"831cd469-6ca2-43b7-9159-4cd36faeedbf\") " Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.094150 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-utilities" (OuterVolumeSpecName: "utilities") pod "831cd469-6ca2-43b7-9159-4cd36faeedbf" (UID: "831cd469-6ca2-43b7-9159-4cd36faeedbf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.110959 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831cd469-6ca2-43b7-9159-4cd36faeedbf-kube-api-access-zhzxw" (OuterVolumeSpecName: "kube-api-access-zhzxw") pod "831cd469-6ca2-43b7-9159-4cd36faeedbf" (UID: "831cd469-6ca2-43b7-9159-4cd36faeedbf"). InnerVolumeSpecName "kube-api-access-zhzxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.149939 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "831cd469-6ca2-43b7-9159-4cd36faeedbf" (UID: "831cd469-6ca2-43b7-9159-4cd36faeedbf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.195411 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.195451 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/831cd469-6ca2-43b7-9159-4cd36faeedbf-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.195462 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhzxw\" (UniqueName: \"kubernetes.io/projected/831cd469-6ca2-43b7-9159-4cd36faeedbf-kube-api-access-zhzxw\") on node \"crc\" DevicePath \"\"" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.454215 4809 generic.go:334] "Generic (PLEG): container finished" podID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerID="8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed" exitCode=0 Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.454265 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerDied","Data":"8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed"} Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.454299 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgqz6" event={"ID":"831cd469-6ca2-43b7-9159-4cd36faeedbf","Type":"ContainerDied","Data":"e0c9c5325b595970b595038a91b977aa800673a12c677735d6204261ba0bdc35"} Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.454321 4809 scope.go:117] "RemoveContainer" containerID="8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.454334 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tgqz6" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.484635 4809 scope.go:117] "RemoveContainer" containerID="f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.508758 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tgqz6"] Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.511110 4809 scope.go:117] "RemoveContainer" containerID="50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.518824 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tgqz6"] Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.568373 4809 scope.go:117] "RemoveContainer" containerID="8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed" Sep 30 02:47:50 crc kubenswrapper[4809]: E0930 02:47:50.568808 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed\": container with ID starting with 8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed not found: ID does not exist" containerID="8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.568849 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed"} err="failed to get container status \"8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed\": rpc error: code = NotFound desc = could not find container \"8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed\": container with ID starting with 8e64ced6de5fc6790fe6c091fcc2d84e597736a58e9b445ccb0b7beb2ff6c7ed not found: ID does not exist" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.568875 4809 scope.go:117] "RemoveContainer" containerID="f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b" Sep 30 02:47:50 crc kubenswrapper[4809]: E0930 02:47:50.569995 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b\": container with ID starting with f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b not found: ID does not exist" containerID="f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.570023 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b"} err="failed to get container status \"f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b\": rpc error: code = NotFound desc = could not find container \"f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b\": container with ID starting with f56901b32eded171ad88e62c0e17ac7c8538361d41cb560dd1a9462e13a6bf5b not found: ID does not exist" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.570037 4809 scope.go:117] "RemoveContainer" containerID="50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a" Sep 30 02:47:50 crc kubenswrapper[4809]: E0930 02:47:50.572446 4809 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a\": container with ID starting with 50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a not found: ID does not exist" containerID="50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a" Sep 30 02:47:50 crc kubenswrapper[4809]: I0930 02:47:50.572482 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a"} err="failed to get container status \"50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a\": rpc error: code = NotFound desc = could not find container \"50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a\": container with ID starting with 50b78da0e0eff4e20827ea5f592f9e00d212c82d8dc7269ab8296799ec48ae4a not found: ID does not exist" Sep 30 02:47:51 crc kubenswrapper[4809]: I0930 02:47:51.702463 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" path="/var/lib/kubelet/pods/831cd469-6ca2-43b7-9159-4cd36faeedbf/volumes" Sep 30 02:47:55 crc kubenswrapper[4809]: I0930 02:47:55.327114 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:47:55 crc kubenswrapper[4809]: I0930 02:47:55.329052 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:47:59 crc kubenswrapper[4809]: I0930 02:47:59.803346 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-f9r9k_a2031b74-3b79-422f-86c0-f4ddad880624/control-plane-machine-set-operator/0.log" Sep 30 02:48:00 crc kubenswrapper[4809]: I0930 02:48:00.038070 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zlcqw_ca4b56e9-c63b-41a3-8182-90019963009f/kube-rbac-proxy/0.log" Sep 30 02:48:00 crc kubenswrapper[4809]: I0930 02:48:00.114857 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zlcqw_ca4b56e9-c63b-41a3-8182-90019963009f/machine-api-operator/0.log" Sep 30 02:48:14 crc kubenswrapper[4809]: I0930 02:48:14.921459 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-wldts_639e3dbe-9ddd-484e-917a-97c8e230d4b5/cert-manager-controller/0.log" Sep 30 02:48:15 crc kubenswrapper[4809]: I0930 02:48:15.124855 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-xjbqr_fad5773f-9c6d-4050-bd1f-a3b02f66a1b5/cert-manager-cainjector/0.log" Sep 30 02:48:15 crc kubenswrapper[4809]: I0930 02:48:15.169655 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-sf4xj_eef61867-7bb5-4d67-a7b8-8d370bda1e8d/cert-manager-webhook/0.log" Sep 30 02:48:25 crc kubenswrapper[4809]: I0930 02:48:25.324830 4809 patch_prober.go:28] interesting 
pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:48:25 crc kubenswrapper[4809]: I0930 02:48:25.325523 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:48:28 crc kubenswrapper[4809]: I0930 02:48:28.857558 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-g6zsl_a0c7c770-d362-416b-9ccf-2e1ce42a1096/nmstate-console-plugin/0.log" Sep 30 02:48:29 crc kubenswrapper[4809]: I0930 02:48:29.031513 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-qm4mw_53550ed2-303b-4654-bc54-7b6f8e992f89/kube-rbac-proxy/0.log" Sep 30 02:48:29 crc kubenswrapper[4809]: I0930 02:48:29.033031 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-t4sfn_7be99730-5c11-4b2b-b063-3500766ddfe6/nmstate-handler/0.log" Sep 30 02:48:29 crc kubenswrapper[4809]: I0930 02:48:29.076966 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-qm4mw_53550ed2-303b-4654-bc54-7b6f8e992f89/nmstate-metrics/0.log" Sep 30 02:48:29 crc kubenswrapper[4809]: I0930 02:48:29.255754 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-cx9mr_4f6a4111-9185-4e06-84ae-105096b1fc26/nmstate-webhook/0.log" Sep 30 02:48:29 crc kubenswrapper[4809]: I0930 02:48:29.271861 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-d824k_5f50e79d-ecc2-4e0b-9d07-9e1746b5f0cf/nmstate-operator/0.log" Sep 30 02:48:42 crc kubenswrapper[4809]: I0930 02:48:42.908188 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6596b6db9-chq2m_cbc83e5f-ab0e-42be-96ed-efaa5f594ff7/kube-rbac-proxy/0.log" Sep 30 02:48:42 crc kubenswrapper[4809]: I0930 02:48:42.937421 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6596b6db9-chq2m_cbc83e5f-ab0e-42be-96ed-efaa5f594ff7/manager/0.log" Sep 30 02:48:55 crc kubenswrapper[4809]: I0930 02:48:55.325299 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:48:55 crc kubenswrapper[4809]: I0930 02:48:55.325954 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:48:55 crc kubenswrapper[4809]: I0930 02:48:55.326016 4809 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
Sep 30 02:48:55 crc kubenswrapper[4809]: I0930 02:48:55.326984 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:48:55 crc kubenswrapper[4809]: I0930 02:48:55.327058 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd" gracePeriod=600 Sep 30 02:48:55 crc kubenswrapper[4809]: E0930 02:48:55.533784 4809 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod526944fa_517b_47ad_abf1_75683c7f70a1.slice/crio-conmon-e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd.scope\": RecentStats: unable to find data in memory cache]" Sep 30 02:48:56 crc kubenswrapper[4809]: I0930 02:48:56.181909 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd" exitCode=0 Sep 30 02:48:56 crc kubenswrapper[4809]: I0930 02:48:56.181974 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd"} Sep 30 02:48:56 crc kubenswrapper[4809]: I0930 02:48:56.182303 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerStarted","Data":"b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00"} Sep 30 02:48:56 crc kubenswrapper[4809]: I0930 02:48:56.182329 4809 scope.go:117] "RemoveContainer" containerID="f8da4e05e29992ddaefa3f435c270a5e69cc5cc3cf3802f1f42daa0d76500215" Sep 30 02:48:57 crc kubenswrapper[4809]: I0930 02:48:57.743130 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-fcc886d58-l9g4b_bfea1e6a-7933-4546-a9b5-1d16034b57a3/cluster-logging-operator/0.log" Sep 30 02:48:57 crc kubenswrapper[4809]: I0930 02:48:57.851803 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-lf4d2_7567296a-f03a-4732-8c0c-ab0a5f6a2acb/collector/0.log" Sep 30 02:48:57 crc kubenswrapper[4809]: I0930 02:48:57.951960 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-compactor-0_2b167b44-b57b-43fe-b959-68c6d9a8dc78/loki-compactor/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.053612 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-distributor-67c9b4c785-5tqn7_70ea64fe-b6ed-460c-b37e-fb80fe0420af/loki-distributor/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.099862 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-575dfb8665-4fwzc_6f1ca034-50e2-49d4-ba08-52e3d91d463c/gateway/0.log" Sep 30 02:48:58 
crc kubenswrapper[4809]: I0930 02:48:58.171097 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-575dfb8665-4fwzc_6f1ca034-50e2-49d4-ba08-52e3d91d463c/opa/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.300448 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-575dfb8665-dnzjd_694ba803-44df-4e1d-9236-c84411352efe/opa/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.300522 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-575dfb8665-dnzjd_694ba803-44df-4e1d-9236-c84411352efe/gateway/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.384096 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-index-gateway-0_38aa55e1-d827-448d-bf0f-11732b946db1/loki-index-gateway/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.620027 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-querier-7454676c57-jl2gl_5d4fd159-6ed4-43b0-a478-86029c5648b6/loki-querier/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.625091 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-ingester-0_f51c82f4-e2f6-4c00-8132-482f9d5b1e90/loki-ingester/0.log" Sep 30 02:48:58 crc kubenswrapper[4809]: I0930 02:48:58.750259 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-query-frontend-6b467cdd84-59gpr_a29afcf0-0d05-4bbc-9f7a-4258b35c43f8/loki-query-frontend/0.log" Sep 30 02:49:14 crc kubenswrapper[4809]: I0930 02:49:14.015143 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-8pcmd_ef59525d-d301-4af1-b9c8-26933453b3d9/kube-rbac-proxy/0.log" Sep 30 02:49:14 crc kubenswrapper[4809]: I0930 02:49:14.200264 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-frr-files/0.log" Sep 30 02:49:14 crc kubenswrapper[4809]: I0930 02:49:14.326053 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-8pcmd_ef59525d-d301-4af1-b9c8-26933453b3d9/controller/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.090690 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-frr-files/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.103783 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-metrics/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.125956 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-reloader/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.158318 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-reloader/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.296743 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-frr-files/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.324445 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-reloader/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.334328 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-metrics/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.380180 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-metrics/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.538823 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-frr-files/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.565204 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-metrics/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.565432 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/cp-reloader/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.626498 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/controller/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.755406 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/kube-rbac-proxy/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.784439 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/frr-metrics/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.877340 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/kube-rbac-proxy-frr/0.log" Sep 30 02:49:15 crc kubenswrapper[4809]: I0930 02:49:15.998845 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/reloader/0.log" Sep 30 02:49:16 crc kubenswrapper[4809]: I0930 02:49:16.155080 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-n7lvn_5267fc88-99fb-48cc-b1eb-3cd3963ce8d5/frr-k8s-webhook-server/0.log" Sep 30 02:49:16 crc kubenswrapper[4809]: I0930 02:49:16.267020 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-664f94654f-smv98_4c3e6257-d644-4970-9cb2-08af086cbe4b/manager/0.log" Sep 30 02:49:16 crc kubenswrapper[4809]: I0930 02:49:16.938953 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-656594ff57-qhpbb_2b0bb1ea-c64d-4c16-8bb9-10c2a477adcb/webhook-server/0.log" Sep 30 02:49:17 crc kubenswrapper[4809]: I0930 02:49:17.246579 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-p7464_394e51b3-8289-44ef-80b6-f14b9b56b5bd/kube-rbac-proxy/0.log" Sep 30 02:49:17 crc kubenswrapper[4809]: I0930 02:49:17.859373 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-p7464_394e51b3-8289-44ef-80b6-f14b9b56b5bd/speaker/0.log" Sep 30 02:49:18 crc kubenswrapper[4809]: I0930 02:49:18.281885 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4wd69_be322c70-01b7-420f-b3bb-3f77a839930f/frr/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.307126 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/util/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.542123 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/pull/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.547286 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/pull/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.583186 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/util/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.806294 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/pull/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.852716 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/util/0.log" Sep 30 02:49:30 crc kubenswrapper[4809]: I0930 02:49:30.885692 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bctjg8m_d64a9837-eab6-471a-a17e-cff4f9ed5a06/extract/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.058380 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/util/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.256288 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/util/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.289189 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/pull/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.324516 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/pull/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.389718 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/util/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.458629 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/extract/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: 
I0930 02:49:31.470670 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgstqw_a86f4233-76f9-4358-85e9-48cc83757d13/pull/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.597831 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/util/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.784489 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/pull/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.800995 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/util/0.log" Sep 30 02:49:31 crc kubenswrapper[4809]: I0930 02:49:31.807324 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/pull/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.033585 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/util/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.084235 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/extract/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.085346 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c096jjt_86edf678-f512-4b77-830f-c3d098171d07/pull/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.272895 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/extract-utilities/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.431443 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/extract-utilities/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.464922 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/extract-content/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.507671 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/extract-content/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.654102 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/extract-content/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.683459 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/extract-utilities/0.log" Sep 30 02:49:32 crc kubenswrapper[4809]: I0930 02:49:32.924998 4809 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/extract-utilities/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.139732 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/extract-content/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.204207 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/extract-utilities/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.221795 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/extract-content/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.457998 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/extract-content/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.493063 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/extract-utilities/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.847972 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/util/0.log" Sep 30 02:49:33 crc kubenswrapper[4809]: I0930 02:49:33.960797 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2mrbv_b804053a-50f4-4355-8dc6-2183cb8c964f/registry-server/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.056856 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/util/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.088259 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/pull/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.172260 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/pull/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.410880 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/util/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.458788 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/extract/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.473100 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d9661707qb72_c9386292-91d5-4e1e-a63f-058cc8c88b9e/pull/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.607723 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/util/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.814989 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/pull/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.869522 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/pull/0.log" Sep 30 02:49:34 crc kubenswrapper[4809]: I0930 02:49:34.931148 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/util/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.084201 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/pull/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.097884 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4pvqk_008393f8-6b7a-461e-91a1-47e58d4942d4/registry-server/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.143986 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/util/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.185183 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96wnjs5_afa86b55-6324-47b3-a861-8075fb31f4e1/extract/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.264571 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-tcqkz_4983b18e-5198-4c30-874a-50c718310352/marketplace-operator/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.347433 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/extract-utilities/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.503995 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/extract-content/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.509085 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/extract-utilities/0.log" Sep 30 02:49:35 crc kubenswrapper[4809]: I0930 02:49:35.529722 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/extract-content/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.177016 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/extract-content/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.201046 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/extract-utilities/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.246716 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/extract-utilities/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.464322 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/extract-content/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.497826 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/extract-utilities/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.547991 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/extract-content/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.568957 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pskp9_9c79b2c1-6175-4968-bd68-8a8493bf2e1f/registry-server/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.735710 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/extract-content/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.821561 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/extract-utilities/0.log" Sep 30 02:49:36 crc kubenswrapper[4809]: I0930 02:49:36.963869 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tqkwp_ef0a3eb8-66ff-40f0-af00-3523984c3092/registry-server/0.log" Sep 30 02:49:50 crc kubenswrapper[4809]: I0930 02:49:50.916553 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-5tk2x_a8ae4f48-169a-409c-bae6-6a89fb1263cb/prometheus-operator/0.log" Sep 30 02:49:51 crc kubenswrapper[4809]: I0930 02:49:51.102620 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7f4bb4f46-nkwd4_5b43be87-9a51-42ab-85cc-f193171a9682/prometheus-operator-admission-webhook/0.log" Sep 30 02:49:51 crc kubenswrapper[4809]: I0930 02:49:51.120076 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7f4bb4f46-9ckq4_83cb98f0-24e4-42a8-a44d-e96d65c87580/prometheus-operator-admission-webhook/0.log" Sep 30 02:49:51 crc kubenswrapper[4809]: I0930 02:49:51.306998 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-d9b5l_8eefb09b-5a65-4213-b5e8-9842cbd24fcf/operator/0.log" Sep 30 02:49:51 crc kubenswrapper[4809]: I0930 02:49:51.339915 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-ui-dashboards-6584dc9448-jw9fb_b25950e5-3b7e-49e9-bd6a-2e7c645bd468/observability-ui-dashboards/0.log" Sep 30 02:49:51 crc kubenswrapper[4809]: I0930 02:49:51.506275 4809 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-svnh2_4b9630a3-4470-48ab-982b-f9f9cedf52da/perses-operator/0.log" Sep 30 02:50:07 crc kubenswrapper[4809]: I0930 02:50:07.431731 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6596b6db9-chq2m_cbc83e5f-ab0e-42be-96ed-efaa5f594ff7/kube-rbac-proxy/0.log" Sep 30 02:50:07 crc kubenswrapper[4809]: I0930 02:50:07.466500 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6596b6db9-chq2m_cbc83e5f-ab0e-42be-96ed-efaa5f594ff7/manager/0.log" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.224431 4809 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fc4b5"] Sep 30 02:50:20 crc kubenswrapper[4809]: E0930 02:50:20.225616 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="extract-utilities" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225630 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="extract-utilities" Sep 30 02:50:20 crc kubenswrapper[4809]: E0930 02:50:20.225680 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="registry-server" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225686 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="registry-server" Sep 30 02:50:20 crc kubenswrapper[4809]: E0930 02:50:20.225695 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="registry-server" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225701 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="registry-server" Sep 30 02:50:20 crc kubenswrapper[4809]: E0930 02:50:20.225718 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="extract-utilities" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225724 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="extract-utilities" Sep 30 02:50:20 crc kubenswrapper[4809]: E0930 02:50:20.225737 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="extract-content" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225742 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="extract-content" Sep 30 02:50:20 crc kubenswrapper[4809]: E0930 02:50:20.225759 4809 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="extract-content" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225765 4809 state_mem.go:107] "Deleted CPUSet assignment" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="extract-content" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225963 4809 memory_manager.go:354] "RemoveStaleState removing state" podUID="831cd469-6ca2-43b7-9159-4cd36faeedbf" containerName="registry-server" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.225986 4809 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="bb45b82d-b194-4e03-981a-9de9fdd23e0a" containerName="registry-server" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.229107 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.241368 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fc4b5"] Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.311605 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-catalog-content\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.311779 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq2mr\" (UniqueName: \"kubernetes.io/projected/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-kube-api-access-vq2mr\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.312176 4809 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-utilities\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.413766 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-catalog-content\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.413825 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq2mr\" (UniqueName: \"kubernetes.io/projected/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-kube-api-access-vq2mr\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.413931 4809 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-utilities\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.414412 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-utilities\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.414424 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-catalog-content\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " 
pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:20 crc kubenswrapper[4809]: I0930 02:50:20.904204 4809 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq2mr\" (UniqueName: \"kubernetes.io/projected/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-kube-api-access-vq2mr\") pod \"redhat-operators-fc4b5\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:21 crc kubenswrapper[4809]: I0930 02:50:21.163875 4809 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:22 crc kubenswrapper[4809]: I0930 02:50:22.278016 4809 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fc4b5"] Sep 30 02:50:23 crc kubenswrapper[4809]: I0930 02:50:23.165144 4809 generic.go:334] "Generic (PLEG): container finished" podID="83dbdd33-ea88-46f6-8c1f-a59805d42d8e" containerID="c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d" exitCode=0 Sep 30 02:50:23 crc kubenswrapper[4809]: I0930 02:50:23.165185 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerDied","Data":"c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d"} Sep 30 02:50:23 crc kubenswrapper[4809]: I0930 02:50:23.165501 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerStarted","Data":"064238f7e7b309fe80f91d945ea7e24f6408b6aed9a6c5d9b04fede583438da9"} Sep 30 02:50:23 crc kubenswrapper[4809]: I0930 02:50:23.168261 4809 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 02:50:25 crc kubenswrapper[4809]: I0930 02:50:25.195362 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerStarted","Data":"cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18"} Sep 30 02:50:29 crc kubenswrapper[4809]: I0930 02:50:29.249096 4809 generic.go:334] "Generic (PLEG): container finished" podID="83dbdd33-ea88-46f6-8c1f-a59805d42d8e" containerID="cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18" exitCode=0 Sep 30 02:50:29 crc kubenswrapper[4809]: I0930 02:50:29.249171 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerDied","Data":"cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18"} Sep 30 02:50:30 crc kubenswrapper[4809]: I0930 02:50:30.260193 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerStarted","Data":"e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6"} Sep 30 02:50:30 crc kubenswrapper[4809]: I0930 02:50:30.282914 4809 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fc4b5" podStartSLOduration=3.735767868 podStartE2EDuration="10.282899841s" podCreationTimestamp="2025-09-30 02:50:20 +0000 UTC" firstStartedPulling="2025-09-30 02:50:23.167605255 +0000 UTC m=+9674.203854663" lastFinishedPulling="2025-09-30 02:50:29.714737228 +0000 UTC m=+9680.750986636" 
observedRunningTime="2025-09-30 02:50:30.278867881 +0000 UTC m=+9681.315117309" watchObservedRunningTime="2025-09-30 02:50:30.282899841 +0000 UTC m=+9681.319149249" Sep 30 02:50:30 crc kubenswrapper[4809]: E0930 02:50:30.587290 4809 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.24:48474->38.129.56.24:38341: write tcp 38.129.56.24:48474->38.129.56.24:38341: write: broken pipe Sep 30 02:50:31 crc kubenswrapper[4809]: I0930 02:50:31.167412 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:31 crc kubenswrapper[4809]: I0930 02:50:31.167775 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:33 crc kubenswrapper[4809]: I0930 02:50:33.209540 4809 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fc4b5" podUID="83dbdd33-ea88-46f6-8c1f-a59805d42d8e" containerName="registry-server" probeResult="failure" output=< Sep 30 02:50:33 crc kubenswrapper[4809]: timeout: failed to connect service ":50051" within 1s Sep 30 02:50:33 crc kubenswrapper[4809]: > Sep 30 02:50:41 crc kubenswrapper[4809]: I0930 02:50:41.226315 4809 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:41 crc kubenswrapper[4809]: I0930 02:50:41.299779 4809 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:41 crc kubenswrapper[4809]: I0930 02:50:41.489265 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fc4b5"] Sep 30 02:50:42 crc kubenswrapper[4809]: I0930 02:50:42.388938 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fc4b5" podUID="83dbdd33-ea88-46f6-8c1f-a59805d42d8e" containerName="registry-server" containerID="cri-o://e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6" gracePeriod=2 Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.377397 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.400102 4809 generic.go:334] "Generic (PLEG): container finished" podID="83dbdd33-ea88-46f6-8c1f-a59805d42d8e" containerID="e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6" exitCode=0 Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.400143 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerDied","Data":"e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6"} Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.400176 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fc4b5" event={"ID":"83dbdd33-ea88-46f6-8c1f-a59805d42d8e","Type":"ContainerDied","Data":"064238f7e7b309fe80f91d945ea7e24f6408b6aed9a6c5d9b04fede583438da9"} Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.400195 4809 scope.go:117] "RemoveContainer" containerID="e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.400201 4809 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fc4b5" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.435279 4809 scope.go:117] "RemoveContainer" containerID="cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.458426 4809 scope.go:117] "RemoveContainer" containerID="c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.516801 4809 scope.go:117] "RemoveContainer" containerID="e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6" Sep 30 02:50:43 crc kubenswrapper[4809]: E0930 02:50:43.517146 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6\": container with ID starting with e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6 not found: ID does not exist" containerID="e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.517189 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6"} err="failed to get container status \"e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6\": rpc error: code = NotFound desc = could not find container \"e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6\": container with ID starting with e22ea20e498002f7ed719254c0ddbf79239a38b71dd2d64efd8fe4adc4bd1ab6 not found: ID does not exist" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.517217 4809 scope.go:117] "RemoveContainer" containerID="cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18" Sep 30 02:50:43 crc kubenswrapper[4809]: E0930 02:50:43.517539 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18\": container with ID starting with cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18 not found: ID does not exist" containerID="cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.517570 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18"} err="failed to get container status \"cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18\": rpc error: code = NotFound desc = could not find container \"cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18\": container with ID starting with cefe10451d216351f7eb3c98d44b3237a2bf73689f76cd05cf218fd5a09a1b18 not found: ID does not exist" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.517591 4809 scope.go:117] "RemoveContainer" containerID="c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d" Sep 30 02:50:43 crc kubenswrapper[4809]: E0930 02:50:43.517991 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d\": container with ID starting with c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d not found: ID does not exist" containerID="c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d" 
Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.518049 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d"} err="failed to get container status \"c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d\": rpc error: code = NotFound desc = could not find container \"c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d\": container with ID starting with c49f556a94498ca394116fe2d0bc5a1bf2a3952f3350e6246d5829cb11fef85d not found: ID does not exist" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.523288 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq2mr\" (UniqueName: \"kubernetes.io/projected/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-kube-api-access-vq2mr\") pod \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.523467 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-catalog-content\") pod \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.526885 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-utilities\") pod \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\" (UID: \"83dbdd33-ea88-46f6-8c1f-a59805d42d8e\") " Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.527786 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-utilities" (OuterVolumeSpecName: "utilities") pod "83dbdd33-ea88-46f6-8c1f-a59805d42d8e" (UID: "83dbdd33-ea88-46f6-8c1f-a59805d42d8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.528032 4809 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.537241 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-kube-api-access-vq2mr" (OuterVolumeSpecName: "kube-api-access-vq2mr") pod "83dbdd33-ea88-46f6-8c1f-a59805d42d8e" (UID: "83dbdd33-ea88-46f6-8c1f-a59805d42d8e"). InnerVolumeSpecName "kube-api-access-vq2mr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.607785 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83dbdd33-ea88-46f6-8c1f-a59805d42d8e" (UID: "83dbdd33-ea88-46f6-8c1f-a59805d42d8e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.631806 4809 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.631834 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq2mr\" (UniqueName: \"kubernetes.io/projected/83dbdd33-ea88-46f6-8c1f-a59805d42d8e-kube-api-access-vq2mr\") on node \"crc\" DevicePath \"\"" Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.749810 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fc4b5"] Sep 30 02:50:43 crc kubenswrapper[4809]: I0930 02:50:43.761447 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fc4b5"] Sep 30 02:50:45 crc kubenswrapper[4809]: I0930 02:50:45.704541 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83dbdd33-ea88-46f6-8c1f-a59805d42d8e" path="/var/lib/kubelet/pods/83dbdd33-ea88-46f6-8c1f-a59805d42d8e/volumes" Sep 30 02:50:55 crc kubenswrapper[4809]: I0930 02:50:55.325414 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:50:55 crc kubenswrapper[4809]: I0930 02:50:55.326186 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:51:25 crc kubenswrapper[4809]: I0930 02:51:25.325733 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:51:25 crc kubenswrapper[4809]: I0930 02:51:25.326554 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:51:55 crc kubenswrapper[4809]: I0930 02:51:55.324980 4809 patch_prober.go:28] interesting pod/machine-config-daemon-2zlhx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 02:51:55 crc kubenswrapper[4809]: I0930 02:51:55.325621 4809 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 02:51:55 crc kubenswrapper[4809]: I0930 02:51:55.325710 4809 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" Sep 30 02:51:55 crc kubenswrapper[4809]: I0930 02:51:55.327162 4809 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00"} pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 02:51:55 crc kubenswrapper[4809]: I0930 02:51:55.327290 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" containerName="machine-config-daemon" containerID="cri-o://b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" gracePeriod=600 Sep 30 02:51:55 crc kubenswrapper[4809]: E0930 02:51:55.483142 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:51:56 crc kubenswrapper[4809]: I0930 02:51:56.317250 4809 generic.go:334] "Generic (PLEG): container finished" podID="526944fa-517b-47ad-abf1-75683c7f70a1" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" exitCode=0 Sep 30 02:51:56 crc kubenswrapper[4809]: I0930 02:51:56.317672 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" event={"ID":"526944fa-517b-47ad-abf1-75683c7f70a1","Type":"ContainerDied","Data":"b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00"} Sep 30 02:51:56 crc kubenswrapper[4809]: I0930 02:51:56.317712 4809 scope.go:117] "RemoveContainer" containerID="e7e7a8a1146e984f4463d5575237a4af6a6ffa6ae453b322d152ac5a7a1ff3cd" Sep 30 02:51:56 crc kubenswrapper[4809]: I0930 02:51:56.318620 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:51:56 crc kubenswrapper[4809]: E0930 02:51:56.319002 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:52:11 crc kubenswrapper[4809]: I0930 02:52:11.691329 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:52:11 crc kubenswrapper[4809]: E0930 02:52:11.692432 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:52:24 
crc kubenswrapper[4809]: I0930 02:52:24.690953 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:52:24 crc kubenswrapper[4809]: E0930 02:52:24.691788 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:52:36 crc kubenswrapper[4809]: I0930 02:52:36.691007 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:52:36 crc kubenswrapper[4809]: E0930 02:52:36.691877 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:52:39 crc kubenswrapper[4809]: I0930 02:52:39.870540 4809 generic.go:334] "Generic (PLEG): container finished" podID="e2a3c2cf-805e-4187-9f1c-064f7732236d" containerID="6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348" exitCode=0 Sep 30 02:52:39 crc kubenswrapper[4809]: I0930 02:52:39.870753 4809 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cwlbq/must-gather-b6znr" event={"ID":"e2a3c2cf-805e-4187-9f1c-064f7732236d","Type":"ContainerDied","Data":"6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348"} Sep 30 02:52:39 crc kubenswrapper[4809]: I0930 02:52:39.872069 4809 scope.go:117] "RemoveContainer" containerID="6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348" Sep 30 02:52:40 crc kubenswrapper[4809]: I0930 02:52:40.940095 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cwlbq_must-gather-b6znr_e2a3c2cf-805e-4187-9f1c-064f7732236d/gather/0.log" Sep 30 02:52:47 crc kubenswrapper[4809]: I0930 02:52:47.691638 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:52:47 crc kubenswrapper[4809]: E0930 02:52:47.693060 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.380502 4809 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cwlbq/must-gather-b6znr"] Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.381323 4809 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-cwlbq/must-gather-b6znr" podUID="e2a3c2cf-805e-4187-9f1c-064f7732236d" containerName="copy" containerID="cri-o://5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b" gracePeriod=2 Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 
02:52:52.390978 4809 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cwlbq/must-gather-b6znr"] Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.870284 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cwlbq_must-gather-b6znr_e2a3c2cf-805e-4187-9f1c-064f7732236d/copy/0.log" Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.872249 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.879566 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhf6l\" (UniqueName: \"kubernetes.io/projected/e2a3c2cf-805e-4187-9f1c-064f7732236d-kube-api-access-bhf6l\") pod \"e2a3c2cf-805e-4187-9f1c-064f7732236d\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.879606 4809 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e2a3c2cf-805e-4187-9f1c-064f7732236d-must-gather-output\") pod \"e2a3c2cf-805e-4187-9f1c-064f7732236d\" (UID: \"e2a3c2cf-805e-4187-9f1c-064f7732236d\") " Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.891966 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a3c2cf-805e-4187-9f1c-064f7732236d-kube-api-access-bhf6l" (OuterVolumeSpecName: "kube-api-access-bhf6l") pod "e2a3c2cf-805e-4187-9f1c-064f7732236d" (UID: "e2a3c2cf-805e-4187-9f1c-064f7732236d"). InnerVolumeSpecName "kube-api-access-bhf6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 02:52:52 crc kubenswrapper[4809]: I0930 02:52:52.981289 4809 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhf6l\" (UniqueName: \"kubernetes.io/projected/e2a3c2cf-805e-4187-9f1c-064f7732236d-kube-api-access-bhf6l\") on node \"crc\" DevicePath \"\"" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.073624 4809 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cwlbq_must-gather-b6znr_e2a3c2cf-805e-4187-9f1c-064f7732236d/copy/0.log" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.074410 4809 generic.go:334] "Generic (PLEG): container finished" podID="e2a3c2cf-805e-4187-9f1c-064f7732236d" containerID="5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b" exitCode=143 Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.074464 4809 scope.go:117] "RemoveContainer" containerID="5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.074618 4809 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cwlbq/must-gather-b6znr" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.145842 4809 scope.go:117] "RemoveContainer" containerID="6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.166032 4809 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2a3c2cf-805e-4187-9f1c-064f7732236d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "e2a3c2cf-805e-4187-9f1c-064f7732236d" (UID: "e2a3c2cf-805e-4187-9f1c-064f7732236d"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.187219 4809 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e2a3c2cf-805e-4187-9f1c-064f7732236d-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.294608 4809 scope.go:117] "RemoveContainer" containerID="5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b" Sep 30 02:52:53 crc kubenswrapper[4809]: E0930 02:52:53.299600 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b\": container with ID starting with 5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b not found: ID does not exist" containerID="5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.299662 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b"} err="failed to get container status \"5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b\": rpc error: code = NotFound desc = could not find container \"5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b\": container with ID starting with 5a3d56e484ccea159020c9187794d522c86c01d518b56d16a23a0d8e2474d49b not found: ID does not exist" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.299691 4809 scope.go:117] "RemoveContainer" containerID="6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348" Sep 30 02:52:53 crc kubenswrapper[4809]: E0930 02:52:53.300227 4809 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348\": container with ID starting with 6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348 not found: ID does not exist" containerID="6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.300245 4809 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348"} err="failed to get container status \"6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348\": rpc error: code = NotFound desc = could not find container \"6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348\": container with ID starting with 6ab9a517d9ac2bae577545c9198dbd0d058503f5bba479aeabf958a0d25de348 not found: ID does not exist" Sep 30 02:52:53 crc kubenswrapper[4809]: I0930 02:52:53.705265 4809 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a3c2cf-805e-4187-9f1c-064f7732236d" path="/var/lib/kubelet/pods/e2a3c2cf-805e-4187-9f1c-064f7732236d/volumes" Sep 30 02:53:03 crc kubenswrapper[4809]: I0930 02:53:03.691438 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:53:03 crc kubenswrapper[4809]: E0930 02:53:03.692429 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:53:14 crc kubenswrapper[4809]: I0930 02:53:14.691077 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:53:14 crc kubenswrapper[4809]: E0930 02:53:14.692033 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:53:22 crc kubenswrapper[4809]: I0930 02:53:22.675729 4809 scope.go:117] "RemoveContainer" containerID="2f956dc7c7cc1cf14f93114e4d5b86bafee2b53405e4b05b3afa5a8e86973f3c" Sep 30 02:53:27 crc kubenswrapper[4809]: I0930 02:53:27.692162 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:53:27 crc kubenswrapper[4809]: E0930 02:53:27.693212 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:53:38 crc kubenswrapper[4809]: I0930 02:53:38.692010 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:53:38 crc kubenswrapper[4809]: E0930 02:53:38.693800 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:53:53 crc kubenswrapper[4809]: I0930 02:53:53.697184 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:53:53 crc kubenswrapper[4809]: E0930 02:53:53.700410 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:54:08 crc kubenswrapper[4809]: I0930 02:54:08.691348 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:54:08 crc kubenswrapper[4809]: E0930 02:54:08.692899 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:54:19 crc kubenswrapper[4809]: I0930 02:54:19.710100 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:54:19 crc kubenswrapper[4809]: E0930 02:54:19.711088 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:54:33 crc kubenswrapper[4809]: I0930 02:54:33.691186 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:54:33 crc kubenswrapper[4809]: E0930 02:54:33.692041 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:54:48 crc kubenswrapper[4809]: I0930 02:54:48.691212 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:54:48 crc kubenswrapper[4809]: E0930 02:54:48.692292 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:54:59 crc kubenswrapper[4809]: I0930 02:54:59.712524 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:54:59 crc kubenswrapper[4809]: E0930 02:54:59.714032 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:55:10 crc kubenswrapper[4809]: I0930 02:55:10.697573 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:55:10 crc kubenswrapper[4809]: E0930 02:55:10.699393 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" 
podUID="526944fa-517b-47ad-abf1-75683c7f70a1" Sep 30 02:55:22 crc kubenswrapper[4809]: I0930 02:55:22.691198 4809 scope.go:117] "RemoveContainer" containerID="b5b070a7cfac8a6b6548c376dcdbc4a34202c0afb93108629a962ffbdaeb0a00" Sep 30 02:55:22 crc kubenswrapper[4809]: E0930 02:55:22.691877 4809 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2zlhx_openshift-machine-config-operator(526944fa-517b-47ad-abf1-75683c7f70a1)\"" pod="openshift-machine-config-operator/machine-config-daemon-2zlhx" podUID="526944fa-517b-47ad-abf1-75683c7f70a1" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066643247024462 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066643250017371 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066617321016514 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066617321015464 5ustar corecore